From ef93a73b1313f148011965ef7361f667f371f58b Mon Sep 17 00:00:00 2001
From: Tom Keller <1083460+kellertk@users.noreply.github.com>
Date: Wed, 23 Aug 2023 16:35:15 -0700
Subject: [PATCH] V3 (#791)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* feat!: v3 release

Squashed commit of the following:

commit 9df144b3ae78185cdde6175230510f28093b2fbc
Merge: 19d3f3a 131c7b6
Author: peterwoodworth
Date: Wed Aug 23 14:41:33 2023 -0700

Merge remote-tracking branch 'origin' into vNext

commit 131c7b6fd10c0d7f36e1e49650b241d91ee327b9
Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Date: Tue Aug 22 03:12:24 2023 +0000

chore: Bump jest from 29.6.2 to 29.6.3 (#786)

Bumps [jest](https://github.com/jestjs/jest/tree/HEAD/packages/jest) from 29.6.2 to 29.6.3.
- [Release notes](https://github.com/jestjs/jest/releases)
- [Changelog](https://github.com/jestjs/jest/blob/main/CHANGELOG.md)
- [Commits](https://github.com/jestjs/jest/commits/v29.6.3/packages/jest)

---
updated-dependencies:
- dependency-name: jest
  dependency-type: direct:development
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com>

commit 6334b403ebf4af8bddda86d46ba899570ddd1043
Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Date: Tue Aug 22 03:11:45 2023 +0000

chore: Bump aws-sdk from 2.1436.0 to 2.1441.0 (#787)

Bumps [aws-sdk](https://github.com/aws/aws-sdk-js) from 2.1436.0 to 2.1441.0.
- [Release notes](https://github.com/aws/aws-sdk-js/releases)
- [Commits](https://github.com/aws/aws-sdk-js/compare/v2.1436.0...v2.1441.0)

---
updated-dependencies:
- dependency-name: aws-sdk
  dependency-type: direct:production
  update-type: version-update:semver-minor
...
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> commit 19d3f3a169bca29bbfa58d92770feafec8e62e8d Author: peterwoodworth Date: Fri Aug 18 20:13:39 2023 -0700 chore: update readme and changelog commit 4403e8d882fb36991ed4dcce5bd9b6a8ecaf0369 Author: peterwoodworth Date: Fri Aug 18 17:12:47 2023 -0700 chore: update workflows commit 0f1923f1a1dbc0ab87a454b3445b17ee94947bde Author: peterwoodworth Date: Fri Aug 18 17:01:59 2023 -0700 chore: update .github with current commit 60a9bf1676e5fd13cdef7af0e2992eb6a5ffa44e Author: peterwoodworth Date: Fri Aug 18 17:00:14 2023 -0700 chore: update .github with current commit d3464b25f813afb492c7f853fb489b04cb92c183 Author: peterwoodworth Date: Fri Aug 18 16:58:01 2023 -0700 chore: add examples to vNext branch commit 7cd050ca4dfdb9e7fbea0887016f1e2d5b85f5f6 Author: peterwoodworth Date: Fri Aug 18 16:54:12 2023 -0700 chore: update readme commit 0e32a1377520da6a3d18625b0a0cac69b916635e Author: peterwoodworth Date: Fri Aug 18 16:19:17 2023 -0700 chore: update readme commit 2625b45d566d7050dd914a0ea647b8f7339f3c33 Author: peterwoodworth Date: Fri Aug 18 15:12:36 2023 -0700 chore: update readme commit 19eb4df27e1ffb7008da29363d77b4a99e7a9bf4 Author: peterwoodworth Date: Thu Aug 17 17:22:20 2023 -0700 chore: update readme commit 71ff793a0fc98b8dba54cc60935cde0bf0861438 Author: peterwoodworth Date: Thu Aug 17 17:18:42 2023 -0700 chore: update readme commit a272f95336ef843c58cd66e76addb8c7566f7f4f Author: peterwoodworth Date: Wed Aug 16 17:05:42 2023 -0700 chore: update readme commit 360ea313a73f8d3b7839fb5db64aecf801ad5fda Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue Aug 15 03:33:37 2023 +0000 chore: Bump aws-sdk from 2.1431.0 to 2.1436.0 (#783) Bumps [aws-sdk](https://github.com/aws/aws-sdk-js) from 2.1431.0 to 2.1436.0. - [Release notes](https://github.com/aws/aws-sdk-js/releases) - [Commits](https://github.com/aws/aws-sdk-js/compare/v2.1431.0...v2.1436.0) --- updated-dependencies: - dependency-name: aws-sdk dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> commit de93b00c9f538bf2000deda62b2da850b2c737e9 Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue Aug 15 03:32:58 2023 +0000 chore: Bump eslint from 8.46.0 to 8.47.0 (#784) Bumps [eslint](https://github.com/eslint/eslint) from 8.46.0 to 8.47.0. - [Release notes](https://github.com/eslint/eslint/releases) - [Changelog](https://github.com/eslint/eslint/blob/main/CHANGELOG.md) - [Commits](https://github.com/eslint/eslint/compare/v8.46.0...v8.47.0) --- updated-dependencies: - dependency-name: eslint dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> commit 4dd1aeed85e721ab38897f6fcae00ce152e3e9a4 Author: peterwoodworth Date: Fri Aug 11 16:41:44 2023 -0700 chore: remove unused dependency, fixing vulnerability commit 7f4507af3c45c98544e20021190df6f2ce560cd2 Author: peterwoodworth Date: Fri Aug 11 16:24:09 2023 -0700 fix: maxRetry hit infinite loop with negative input commit 746d33e7c1cc7b6e40a836b0f2ef033136aa6b2a Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue Aug 8 03:23:17 2023 +0000 chore: Bump aws-sdk from 2.1426.0 to 2.1431.0 (#782) Bumps [aws-sdk](https://github.com/aws/aws-sdk-js) from 2.1426.0 to 2.1431.0. - [Release notes](https://github.com/aws/aws-sdk-js/releases) - [Commits](https://github.com/aws/aws-sdk-js/compare/v2.1426.0...v2.1431.0) --- updated-dependencies: - dependency-name: aws-sdk dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> commit 706fc9ae3ddc432f1b3de583922ba05d3b484cae Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue Aug 1 03:41:00 2023 +0000 chore: Bump eslint from 8.45.0 to 8.46.0 (#780) Bumps [eslint](https://github.com/eslint/eslint) from 8.45.0 to 8.46.0. - [Release notes](https://github.com/eslint/eslint/releases) - [Changelog](https://github.com/eslint/eslint/blob/main/CHANGELOG.md) - [Commits](https://github.com/eslint/eslint/compare/v8.45.0...v8.46.0) --- updated-dependencies: - dependency-name: eslint dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> commit 9918371ad58bc793ab867692ad2935b2ee847bde Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue Aug 1 03:39:16 2023 +0000 chore: Bump aws-sdk from 2.1421.0 to 2.1426.0 (#781) Bumps [aws-sdk](https://github.com/aws/aws-sdk-js) from 2.1421.0 to 2.1426.0. - [Release notes](https://github.com/aws/aws-sdk-js/releases) - [Commits](https://github.com/aws/aws-sdk-js/compare/v2.1421.0...v2.1426.0) --- updated-dependencies: - dependency-name: aws-sdk dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> commit 345462ab9e5090ddc595fc179952ed2fb03c74b7 Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue Aug 1 03:38:28 2023 +0000 chore: Bump jest from 29.6.1 to 29.6.2 (#779) Bumps [jest](https://github.com/facebook/jest/tree/HEAD/packages/jest) from 29.6.1 to 29.6.2. - [Release notes](https://github.com/facebook/jest/releases) - [Changelog](https://github.com/jestjs/jest/blob/main/CHANGELOG.md) - [Commits](https://github.com/facebook/jest/commits/v29.6.2/packages/jest) --- updated-dependencies: - dependency-name: jest dependency-type: direct:development update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> commit 80a7a6c2a2309f1aaa0ea3c8689a33b1647a4421 Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue Jul 25 03:22:02 2023 +0000 chore: Bump aws-sdk from 2.1416.0 to 2.1421.0 (#778) Bumps [aws-sdk](https://github.com/aws/aws-sdk-js) from 2.1416.0 to 2.1421.0. - [Release notes](https://github.com/aws/aws-sdk-js/releases) - [Commits](https://github.com/aws/aws-sdk-js/compare/v2.1416.0...v2.1421.0) --- updated-dependencies: - dependency-name: aws-sdk dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> commit 385d7e1d64dd2bbc3cb58a58ce18ab63f0a0fcb8 Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue Jul 18 03:25:33 2023 +0000 chore: Bump aws-sdk from 2.1413.0 to 2.1416.0 (#774) Bumps [aws-sdk](https://github.com/aws/aws-sdk-js) from 2.1413.0 to 2.1416.0. - [Release notes](https://github.com/aws/aws-sdk-js/releases) - [Commits](https://github.com/aws/aws-sdk-js/compare/v2.1413.0...v2.1416.0) --- updated-dependencies: - dependency-name: aws-sdk dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> commit 48560b5d49c5fdf658ea3c377d4da138363f1d6b Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue Jul 18 03:24:46 2023 +0000 chore: Bump eslint from 8.44.0 to 8.45.0 (#773) Bumps [eslint](https://github.com/eslint/eslint) from 8.44.0 to 8.45.0. - [Release notes](https://github.com/eslint/eslint/releases) - [Changelog](https://github.com/eslint/eslint/blob/main/CHANGELOG.md) - [Commits](https://github.com/eslint/eslint/compare/v8.44.0...v8.45.0) --- updated-dependencies: - dependency-name: eslint dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> commit 35d8f9588bf190a9a7fda3411cd548fb8234bee2 Author: Shailja Khurana <117320115+khushail@users.noreply.github.com> Date: Fri Jul 14 15:14:02 2023 -0700 added workflow for handling answerable discussions (#772) commit 86df881b31470c87bf594fc74e03243c2f43d0ff Author: Tom Keller <1083460+kellertk@users.noreply.github.com> Date: Wed Jul 12 14:51:43 2023 -0700 chore: add link to CF template in README (#771) commit a621db3279cf7a1183db9dc2aabc4aba6c11d765 Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue Jul 11 03:16:22 2023 +0000 chore: Bump jest from 29.5.0 to 29.6.1 (#768) Bumps [jest](https://github.com/facebook/jest/tree/HEAD/packages/jest) from 29.5.0 to 29.6.1. - [Release notes](https://github.com/facebook/jest/releases) - [Changelog](https://github.com/jestjs/jest/blob/main/CHANGELOG.md) - [Commits](https://github.com/facebook/jest/commits/v29.6.1/packages/jest) --- updated-dependencies: - dependency-name: jest dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> commit 90858a2e70864a512a33af490a9f9e42b3ff41ff Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue Jul 11 03:15:36 2023 +0000 chore: Bump aws-sdk from 2.1410.0 to 2.1413.0 (#767) Bumps [aws-sdk](https://github.com/aws/aws-sdk-js) from 2.1410.0 to 2.1413.0. - [Release notes](https://github.com/aws/aws-sdk-js/releases) - [Commits](https://github.com/aws/aws-sdk-js/compare/v2.1410.0...v2.1413.0) --- updated-dependencies: - dependency-name: aws-sdk dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> commit 228a9916ee92ff4ab98a8619da45bb76261b4a93 Author: Tom Keller <1083460+kellertk@users.noreply.github.com> Date: Fri Jul 7 16:49:10 2023 -0700 Update README.md for OIDC changes (#764) commit 65c2143642ea7b3c859f1340587377785edcacd6 Author: peterwoodworth Date: Thu Jul 6 15:19:05 2023 -0700 chore: add final tests, add outputs to action.yml commit 7b893ba14bef5ccd014073d438444b85c3113fee Author: peterwoodworth Date: Wed Jul 5 18:55:04 2023 -0700 feat: getIDToken retry, feat: special character in key retry commit 3c981da079c1adfee00f5067fc0659875b5c1253 Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue Jul 4 03:30:19 2023 +0000 chore: Bump eslint from 8.43.0 to 8.44.0 (#761) Bumps [eslint](https://github.com/eslint/eslint) from 8.43.0 to 8.44.0. - [Release notes](https://github.com/eslint/eslint/releases) - [Changelog](https://github.com/eslint/eslint/blob/main/CHANGELOG.md) - [Commits](https://github.com/eslint/eslint/compare/v8.43.0...v8.44.0) --- updated-dependencies: - dependency-name: eslint dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> commit 222080786b74f0214f8927f524bdad32f19aba35 Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue Jul 4 03:29:03 2023 +0000 chore: Bump aws-sdk from 2.1405.0 to 2.1410.0 (#760) Bumps [aws-sdk](https://github.com/aws/aws-sdk-js) from 2.1405.0 to 2.1410.0. - [Release notes](https://github.com/aws/aws-sdk-js/releases) - [Commits](https://github.com/aws/aws-sdk-js/compare/v2.1405.0...v2.1410.0) --- updated-dependencies: - dependency-name: aws-sdk dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> commit aa2675f083c4ad1cc14a10817118e4f34ffd91d0 Author: peterwoodworth Date: Mon Jul 3 16:13:19 2023 -0700 implement last changes for vNext commit 256d740e7107a9b9ac6782a3b2ba5e49dbc6943f Author: peterwoodworth Date: Fri Jun 30 16:20:47 2023 -0700 chore: remove unnecessary step in unit test job commit cd5cd29d7fc559c4d70781db07660bc13e776a57 Author: peterwoodworth Date: Fri Jun 30 16:17:47 2023 -0700 chore: declare eol in gitattributes commit 7b90497500b8bc930dbb50d6ff06069af7f75888 Author: peterwoodworth Date: Fri Jun 30 16:11:12 2023 -0700 chore: disable auto crlf in unit tests commit fb67439785b46cf38d18961980a5de864358e9fe Author: peterwoodworth Date: Fri Jun 30 15:48:15 2023 -0700 fix boolean input commit 2b32a8f2c6a2dc0a4e57e9c6483bb628fce5e737 Author: peterwoodworth Date: Fri Jun 30 15:40:41 2023 -0700 fix webIdentityTokenFile option, rearrange validation logic commit 7526948ef9bf1efb9a8176aaa65dc37dbb3eabd1 Author: Peter Woodworth <44349620+peterwoodworth@users.noreply.github.com> Date: Thu Jun 29 17:29:11 2023 -0700 chore: vnext unit tests (#758) * fix: unit tests * get unit tests running commit 0270d0bcecaf2c76c8fbf7bf3de0d65a6d06e076 Author: Peter Woodworth <44349620+peterwoodworth@users.noreply.github.com> Date: Tue Jun 27 14:22:02 2023 -0700 chore: update readme with new thumbprint (#755) commit 4b8efb08db0b00b6bc841e46bd1fad2dfc1bd32f Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue Jun 27 03:58:21 2023 +0000 chore: Bump aws-sdk from 2.1400.0 to 2.1405.0 (#754) Bumps [aws-sdk](https://github.com/aws/aws-sdk-js) from 2.1400.0 to 2.1405.0. - [Release notes](https://github.com/aws/aws-sdk-js/releases) - [Commits](https://github.com/aws/aws-sdk-js/compare/v2.1400.0...v2.1405.0) --- updated-dependencies: - dependency-name: aws-sdk dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> commit d26f2d03f801d2fb30ea6ec3f62371b53361303a Author: peterwoodworth Date: Wed Jun 21 16:45:34 2023 -0700 feat: role-chaining commit 20f59875fe08cacf367fd280bbc9c23184f760f9 Author: peterwoodworth Date: Wed Jun 21 15:13:42 2023 -0700 feat: optional policy inputs when assuming role commit 10024331f6c897222a7f8a5230c8e8540be7ddd2 Author: peterwoodworth Date: Wed Jun 21 15:08:37 2023 -0700 feat: optional policy inputs when assuming role commit 2cee35f9a95b5a4eff614d995c2718165f600644 Author: peterwoodworth Date: Wed Jun 21 14:51:50 2023 -0700 feat: optional policy inputs when assuming role commit 8aa25a5cb6c49fb073aab5190da3a13b10101e3f Author: peterwoodworth Date: Wed Jun 21 14:50:04 2023 -0700 feat: optional policy inputs when assuming role commit e849bae7178b8dee3762d4486f61d680946a5b34 Author: peterwoodworth Date: Tue Jun 20 18:02:34 2023 -0700 chore: remove windows self-hosted runner commit 8a5ae330ddf37d23eea8465f65c8bef569024b9b Author: peterwoodworth Date: Tue Jun 20 16:10:30 2023 -0700 chore: expand integ tests commit 819a6d1c74e0b9bdc395af3baef9eb47d98a62f6 Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue Jun 20 03:58:58 2023 +0000 chore: Bump eslint from 8.42.0 to 8.43.0 (#750) Bumps [eslint](https://github.com/eslint/eslint) from 8.42.0 to 8.43.0. 
- [Release notes](https://github.com/eslint/eslint/releases) - [Changelog](https://github.com/eslint/eslint/blob/main/CHANGELOG.md) - [Commits](https://github.com/eslint/eslint/compare/v8.42.0...v8.43.0) --- updated-dependencies: - dependency-name: eslint dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> commit e57babb5418419d9e2f37721afddcdfcb8112e63 Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue Jun 20 03:58:19 2023 +0000 chore: Bump aws-sdk from 2.1396.0 to 2.1400.0 (#749) Bumps [aws-sdk](https://github.com/aws/aws-sdk-js) from 2.1396.0 to 2.1400.0. - [Release notes](https://github.com/aws/aws-sdk-js/releases) - [Commits](https://github.com/aws/aws-sdk-js/compare/v2.1396.0...v2.1400.0) --- updated-dependencies: - dependency-name: aws-sdk dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> commit c613996f9a8c7d2129e3c96c9d290321c0970227 Author: peterwoodworth Date: Mon Jun 19 18:38:44 2023 -0700 chore: attempting to get to minimal integ test success commit c13a9c8ef3ee09ca026f3cfbee20e77be680d9c3 Author: peterwoodworth Date: Mon Jun 19 18:36:40 2023 -0700 chore: attempting to get to minimal integ test success commit b98e10c7767b4621e17675c4f82087e1fe3fa582 Author: peterwoodworth Date: Mon Jun 19 18:25:03 2023 -0700 chore: set failing configuration on integ workflow matrix commit 6d193465aecaff821da52de207bb2dadd35075a3 Author: peterwoodworth Date: Mon Jun 19 16:32:19 2023 -0700 chore: remove push workflow trigger commit a8f18666dbddd370a1374297a993ee7e14748beb Author: peterwoodworth Date: Mon Jun 19 16:29:33 2023 -0700 chore: fix workflow file indentation commit 7dec5a88ce1fe635b0bd93795adb4b18f4706307 Author: peterwoodworth Date: Mon Jun 19 16:28:05 2023 -0700 chore: update workflow to be able to trigger commit 0a8178a52a7acde3a8d6ee9e0119a1cae5ecb379 Author: peterwoodworth Date: Mon Jun 19 16:21:45 2023 -0700 chore: update workflow to be able to trigger commit 506b27277d63237176552dc52ab358da5b6364de Author: peterwoodworth Date: Mon Jun 19 16:17:53 2023 -0700 chore: update workflow to be able to trigger commit 5fd3084fc36e372ff1fff382a39b10d03659f355 Author: Peter Woodworth <44349620+peterwoodworth@users.noreply.github.com> Date: Wed Jun 14 15:02:32 2023 -0700 chore: update changelog for release (#745) commit d00f6c6f41fde02a9fd0d469040be6ed0df69e73 Author: Dimitar Date: Wed Jun 14 22:43:04 2023 +0100 Allow inline session policies for assuming role (#739) * Allow to pass inline session policy as a parameter Update the action file Regenerate the dist/ content Add test * Fix typos * Fix stylistic error * Move the inline policy logic to allow assumeRole to use it as well; Update and add tests * Add an option for managed policies * Regenerate the dist/ files * Use multiline input for managed policies * Update readme * Update readme --------- Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> commit ae734070a03cede77f1d165ce2667c3df28466ec Author: GitHub Actions Date: Tue Jun 13 04:55:26 2023 +0000 chore: Update dist commit 3d528c5f7d1aba85453fd1d7669582d0b8a38f18 Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue Jun 13 04:54:37 
2023 +0000 chore: Bump aws-sdk from 2.1391.0 to 2.1396.0 (#744) Bumps [aws-sdk](https://github.com/aws/aws-sdk-js) from 2.1391.0 to 2.1396.0. - [Release notes](https://github.com/aws/aws-sdk-js/releases) - [Commits](https://github.com/aws/aws-sdk-js/compare/v2.1391.0...v2.1396.0) --- updated-dependencies: - dependency-name: aws-sdk dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> commit e7aae617199995b2b768664e5127a66abed5d399 Author: GitHub Actions Date: Tue Jun 6 03:59:43 2023 +0000 chore: Update dist commit 979ee1f6f4a93594bf98042f7921cf687f756423 Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue Jun 6 03:59:04 2023 +0000 chore: Bump aws-sdk from 2.1386.0 to 2.1391.0 (#742) Bumps [aws-sdk](https://github.com/aws/aws-sdk-js) from 2.1386.0 to 2.1391.0. - [Release notes](https://github.com/aws/aws-sdk-js/releases) - [Commits](https://github.com/aws/aws-sdk-js/compare/v2.1386.0...v2.1391.0) --- updated-dependencies: - dependency-name: aws-sdk dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> commit 184acc09ea65625f627dd54f92df135bade013ae Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue Jun 6 03:58:21 2023 +0000 chore: Bump eslint from 8.41.0 to 8.42.0 (#741) Bumps [eslint](https://github.com/eslint/eslint) from 8.41.0 to 8.42.0. - [Release notes](https://github.com/eslint/eslint/releases) - [Changelog](https://github.com/eslint/eslint/blob/main/CHANGELOG.md) - [Commits](https://github.com/eslint/eslint/compare/v8.41.0...v8.42.0) --- updated-dependencies: - dependency-name: eslint dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> commit 375a690dc0af3921541e5f427167f333d7e85f67 Author: Peter Woodworth <44349620+peterwoodworth@users.noreply.github.com> Date: Wed May 31 14:43:39 2023 -0700 chore: update changelog for v2.1.0 (#738) * chore: update changelog for v2.1.0 * chore: update changelog for v2.1.0 --------- Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> commit 8337ca3433e1716b025580b435b374762892752f Author: Peter Woodworth <44349620+peterwoodworth@users.noreply.github.com> Date: Wed May 31 14:38:06 2023 -0700 update time to ancient (#729) Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> commit 5727f247b64f324ec403ac56ae05e220fd02b65f Author: GitHub Actions Date: Tue May 30 03:59:04 2023 +0000 chore: Update dist commit de3676366f78e91e43d80b83ababf5b49cb64b31 Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue May 30 03:58:14 2023 +0000 chore: Bump aws-sdk from 2.1382.0 to 2.1386.0 (#736) Bumps [aws-sdk](https://github.com/aws/aws-sdk-js) from 2.1382.0 to 2.1386.0. - [Release notes](https://github.com/aws/aws-sdk-js/releases) - [Commits](https://github.com/aws/aws-sdk-js/compare/v2.1382.0...v2.1386.0) --- updated-dependencies: - dependency-name: aws-sdk dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> commit 81fba9ea6ab1ad41093faf96dcba39791f6b7650 Author: escudero89 <532828+escudero89@users.noreply.github.com> Date: Fri May 26 00:10:40 2023 +0200 Update README.md with v2 instead of v1 (#733) commit 8815b6557cafe082774ec49c58d911e53c98849d Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue May 23 03:59:25 2023 +0000 chore: Bump eslint from 8.40.0 to 8.41.0 (#732) Bumps [eslint](https://github.com/eslint/eslint) from 8.40.0 to 8.41.0. - [Release notes](https://github.com/eslint/eslint/releases) - [Changelog](https://github.com/eslint/eslint/blob/main/CHANGELOG.md) - [Commits](https://github.com/eslint/eslint/compare/v8.40.0...v8.41.0) --- updated-dependencies: - dependency-name: eslint dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> commit 3d77523f10e6c4d27d79914ddadd2ffc81cec7b7 Author: GitHub Actions Date: Tue May 23 03:58:44 2023 +0000 chore: Update dist commit 4eb7afece4e865205e777456a1f6fd235d04d7c9 Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue May 23 03:58:06 2023 +0000 chore: Bump aws-sdk from 2.1378.0 to 2.1382.0 (#731) Bumps [aws-sdk](https://github.com/aws/aws-sdk-js) from 2.1378.0 to 2.1382.0. - [Release notes](https://github.com/aws/aws-sdk-js/releases) - [Commits](https://github.com/aws/aws-sdk-js/compare/v2.1378.0...v2.1382.0) --- updated-dependencies: - dependency-name: aws-sdk dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> commit d713a182c5467df241c7509cf48b1c382c46080d Author: GitHub Actions Date: Tue May 16 03:58:29 2023 +0000 chore: Update dist commit 1910078421a2d9f965342f483c70445d963a18bd Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue May 16 03:57:45 2023 +0000 chore: Bump aws-sdk from 2.1374.0 to 2.1378.0 (#726) Bumps [aws-sdk](https://github.com/aws/aws-sdk-js) from 2.1374.0 to 2.1378.0. - [Release notes](https://github.com/aws/aws-sdk-js/releases) - [Commits](https://github.com/aws/aws-sdk-js/compare/v2.1374.0...v2.1378.0) --- updated-dependencies: - dependency-name: aws-sdk dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> commit 580afbba8076b302fad6220e95a3a845f32b165a Author: GitHub Actions Date: Tue May 9 03:59:53 2023 +0000 chore: Update dist commit 1daf2a196784ff56c568bfadb58fa2d068f68ad8 Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue May 9 03:59:14 2023 +0000 chore: Bump aws-sdk from 2.1369.0 to 2.1374.0 (#723) Bumps [aws-sdk](https://github.com/aws/aws-sdk-js) from 2.1369.0 to 2.1374.0. - [Release notes](https://github.com/aws/aws-sdk-js/releases) - [Commits](https://github.com/aws/aws-sdk-js/compare/v2.1369.0...v2.1374.0) --- updated-dependencies: - dependency-name: aws-sdk dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> commit 76ad093bd503a78d66f5cebd1de2ff18a9a9fcde Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue May 9 03:57:57 2023 +0000 chore: Bump eslint from 8.39.0 to 8.40.0 (#722) Bumps [eslint](https://github.com/eslint/eslint) from 8.39.0 to 8.40.0. - [Release notes](https://github.com/eslint/eslint/releases) - [Changelog](https://github.com/eslint/eslint/blob/main/CHANGELOG.md) - [Commits](https://github.com/eslint/eslint/compare/v8.39.0...v8.40.0) --- updated-dependencies: - dependency-name: eslint dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> commit 6fbd316fd15f52c3d9f68e7aa06eae4f5699a518 Author: Milo Hyson Date: Fri May 5 18:27:52 2023 -0700 Add role-chaining support (#688) * Add role-chaining support * fix version in readme * minor readme adjustment --------- Co-authored-by: Milo Hyson Co-authored-by: peterwoodworth Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> commit 19f3a6d67f2300788235c77ce0c6244d1fd6a983 Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue May 2 03:59:57 2023 +0000 chore: Bump axios from 1.3.6 to 1.4.0 (#720) Bumps [axios](https://github.com/axios/axios) from 1.3.6 to 1.4.0. - [Release notes](https://github.com/axios/axios/releases) - [Changelog](https://github.com/axios/axios/blob/v1.x/CHANGELOG.md) - [Commits](https://github.com/axios/axios/compare/v1.3.6...v1.4.0) --- updated-dependencies: - dependency-name: axios dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> commit 96aac45bfc3f8ce9e698aae613392f3370d7fd4a Author: GitHub Actions Date: Tue May 2 03:58:25 2023 +0000 chore: Update dist commit 5e6a26459259cc1313b4a75c5e51382dc9755968 Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue May 2 03:57:46 2023 +0000 chore: Bump aws-sdk from 2.1364.0 to 2.1369.0 (#719) Bumps [aws-sdk](https://github.com/aws/aws-sdk-js) from 2.1364.0 to 2.1369.0. - [Release notes](https://github.com/aws/aws-sdk-js/releases) - [Commits](https://github.com/aws/aws-sdk-js/compare/v2.1364.0...v2.1369.0) --- updated-dependencies: - dependency-name: aws-sdk dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> commit 82ea2d2853906c3fe78152101e590fa6caeb5f82 Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue Apr 25 04:01:50 2023 +0000 chore: Bump axios from 1.3.5 to 1.3.6 (#716) Bumps [axios](https://github.com/axios/axios) from 1.3.5 to 1.3.6. - [Release notes](https://github.com/axios/axios/releases) - [Changelog](https://github.com/axios/axios/blob/v1.x/CHANGELOG.md) - [Commits](https://github.com/axios/axios/compare/v1.3.5...v1.3.6) --- updated-dependencies: - dependency-name: axios dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> commit e0213bba7a0b72dc2e934ce7b42d8b7539743ddf Author: GitHub Actions Date: Tue Apr 25 03:59:31 2023 +0000 chore: Update dist commit 2beda2fb7c1e149196b478f6458a41194b23733e Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue Apr 25 03:58:42 2023 +0000 chore: Bump aws-sdk from 2.1360.0 to 2.1364.0 (#715) Bumps [aws-sdk](https://github.com/aws/aws-sdk-js) from 2.1360.0 to 2.1364.0. - [Release notes](https://github.com/aws/aws-sdk-js/releases) - [Commits](https://github.com/aws/aws-sdk-js/compare/v2.1360.0...v2.1364.0) --- updated-dependencies: - dependency-name: aws-sdk dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> commit 0afc5d47c7dbd64214d75fe56300f5a719fa088c Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue Apr 25 03:57:35 2023 +0000 chore: Bump eslint from 8.38.0 to 8.39.0 (#714) Bumps [eslint](https://github.com/eslint/eslint) from 8.38.0 to 8.39.0. - [Release notes](https://github.com/eslint/eslint/releases) - [Changelog](https://github.com/eslint/eslint/blob/main/CHANGELOG.md) - [Commits](https://github.com/eslint/eslint/compare/v8.38.0...v8.39.0) --- updated-dependencies: - dependency-name: eslint dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> commit 31bd3ae2db62cebd1f7f32dfb55b31145fee0b40 Author: GitHub Actions Date: Tue Apr 18 03:59:23 2023 +0000 chore: Update dist commit 46648cb7728257e1ca94a17e05fca397f909497b Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue Apr 18 03:58:27 2023 +0000 chore: Bump aws-sdk from 2.1354.0 to 2.1360.0 (#713) Bumps [aws-sdk](https://github.com/aws/aws-sdk-js) from 2.1354.0 to 2.1360.0. - [Release notes](https://github.com/aws/aws-sdk-js/releases) - [Changelog](https://github.com/aws/aws-sdk-js/blob/master/CHANGELOG.md) - [Commits](https://github.com/aws/aws-sdk-js/compare/v2.1354.0...v2.1360.0) --- updated-dependencies: - dependency-name: aws-sdk dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> commit 60a5c129d08a5f7e035925bfdf8521e4d0f83a3a Author: Jonathan Lang Date: Wed Apr 12 00:46:13 2023 +0200 Fix Typo in README.md (#707) commit f576cc9d9b10de24860b85ac5ab1277b4b570348 Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue Apr 11 04:00:40 2023 +0000 chore: Bump axios from 1.3.4 to 1.3.5 (#712) Bumps [axios](https://github.com/axios/axios) from 1.3.4 to 1.3.5. - [Release notes](https://github.com/axios/axios/releases) - [Changelog](https://github.com/axios/axios/blob/v1.x/CHANGELOG.md) - [Commits](https://github.com/axios/axios/compare/v1.3.4...v1.3.5) --- updated-dependencies: - dependency-name: axios dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> commit 9c17403327ddda52aafe5fc51b49cddb5620455a Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue Apr 11 03:59:38 2023 +0000 chore: Bump eslint from 8.35.0 to 8.38.0 (#711) Bumps [eslint](https://github.com/eslint/eslint) from 8.35.0 to 8.38.0. - [Release notes](https://github.com/eslint/eslint/releases) - [Changelog](https://github.com/eslint/eslint/blob/main/CHANGELOG.md) - [Commits](https://github.com/eslint/eslint/compare/v8.35.0...v8.38.0) --- updated-dependencies: - dependency-name: eslint dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> commit 9c1a10e79cdf0d8ebe5e7f575303b941ccc542b8 Author: GitHub Actions Date: Tue Apr 11 03:58:34 2023 +0000 chore: Update dist commit 9192d9ecb4359801d6a567652a3b495fd010a29e Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue Apr 11 03:57:48 2023 +0000 chore: Bump aws-sdk from 2.1329.0 to 2.1354.0 (#710) Bumps [aws-sdk](https://github.com/aws/aws-sdk-js) from 2.1329.0 to 2.1354.0. - [Release notes](https://github.com/aws/aws-sdk-js/releases) - [Changelog](https://github.com/aws/aws-sdk-js/blob/master/CHANGELOG.md) - [Commits](https://github.com/aws/aws-sdk-js/compare/v2.1329.0...v2.1354.0) --- updated-dependencies: - dependency-name: aws-sdk dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> commit 26227bfa7d973c70c9813eff8c90d40d7ef0ebb1 Author: peterwoodworth Date: Wed Mar 22 16:52:04 2023 -0700 chore: adjust cleanup build target commit 0d90ddd1f2a1a803f2323dae1d33451cb1f229dc Author: peterwoodworth Date: Wed Mar 22 16:51:12 2023 -0700 chore: adjust cleanup build again commit fc80f28dbae48c3c4d63a789ab797f1393e7a704 Author: peterwoodworth Date: Wed Mar 22 16:37:58 2023 -0700 chore: adjust cleanup build and some imports commit 3088522ce8955f12395d0bd90fdf60abbd1365fb Author: peterwoodworth Date: Wed Mar 22 16:32:37 2023 -0700 chore: build and set cleanup file commit 0181111f1de69fc39b5e958e833c0f64d255c138 Author: peterwoodworth Date: Wed Mar 22 15:12:07 2023 -0700 chore: build action commit b72354411530485daa84834184103ba2bcd6ac78 Author: peterwoodworth Date: Wed Mar 22 15:08:39 2023 -0700 fix: branch name is not sanitized, slight refactor commit 0e613a0980cbf65ed5b322eb7a1e075d28913a83 Author: Adrian Środoń Date: Mon Mar 20 23:24:34 2023 +0100 docs: add note about case sensivity to CloudFormation template (#696) * docs: add note about case sensivity to CloudFormation template * Move case-sensivity notes to CloudFormation template commit 12d07d4800b3a0a85481a934f6651b50363994ca Author: Tom Keller Date: Fri Mar 17 17:46:57 2023 -0700 feat: update tests commit f6fdf0cdbd35c53e48896c0163d05ce3dd9dce34 Author: peterwoodworth Date: Wed Mar 15 14:12:01 2023 -0700 feat: aws region is optional, use global sts endpoint when not set commit cb1361ade3c0ad3551e7d2be86bf97afcb51e4d6 Author: Peter Woodworth <44349620+peterwoodworth@users.noreply.github.com> Date: Mon Mar 13 14:50:02 2023 -0700 docs: clarify safety of version migration in 
readme (#692) commit f9f25e69f508f6ce743f5f5f66869ca4e8355860 Author: peterwoodworth Date: Fri Mar 10 14:35:59 2023 -0800 refactor, add new major version features and proxy commit 83f0e97b2fb73c98881c684ce924d55a17ac73d5 Merge: 7fc5f08 f86a0c3 Author: Peter Woodworth <44349620+peterwoodworth@users.noreply.github.com> Date: Tue Mar 7 17:36:19 2023 -0800 Merge pull request #687 from fabienfoerster/patch-2 Update README examples to use v2 commit f86a0c30a5007ff5015b73d58eb1cd958bc2aed8 Author: Fabien Foerster Date: Tue Mar 7 21:29:17 2023 +0100 Update README examples to use v2 commit 7fc5f080e5992f77207d1ea0a8d33d4bcd1c80c8 Author: GitHub Actions Date: Tue Mar 7 01:02:37 2023 +0000 chore: Update dist commit ffc08eae7350b1061d7de219e2135c75561fb680 Author: Tom Keller Date: Mon Mar 6 17:01:09 2023 -0800 Rename master to main commit e1e17a757e536f70e52b5a12b2e8d1d1c60e04ef Author: Tom Keller <1083460+kellertk@users.noreply.github.com> Date: Mon Mar 6 16:35:37 2023 -0800 Version 2 release (#685) * Update README for v2 * Remove codeql analysis * Version bump checkout action * Tweak closed issue message * Changes for v2 release * Adjust action name * Update CHANGELOG * Update action to use node 16 * Update versions * Typo commit bab55d3830fe69833c9fecaa51fe2c829a7508f3 Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue Feb 28 04:09:09 2023 +0000 chore: Bump axios from 1.3.3 to 1.3.4 (#679) Bumps [axios](https://github.com/axios/axios) from 1.3.3 to 1.3.4. - [Release notes](https://github.com/axios/axios/releases) - [Changelog](https://github.com/axios/axios/blob/v1.x/CHANGELOG.md) - [Commits](https://github.com/axios/axios/compare/v1.3.3...v1.3.4) --- updated-dependencies: - dependency-name: axios dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> commit 07b737a14fc628c3cb8b99a172b150fdea376d25 Author: GitHub Actions Date: Tue Feb 28 04:00:33 2023 +0000 chore: Update dist commit 26e4efa71fbc395226c359bd3f2d5fdd38d9af85 Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue Feb 28 03:59:47 2023 +0000 chore: Bump aws-sdk from 2.1320.0 to 2.1324.0 (#677) Bumps [aws-sdk](https://github.com/aws/aws-sdk-js) from 2.1320.0 to 2.1324.0. - [Release notes](https://github.com/aws/aws-sdk-js/releases) - [Changelog](https://github.com/aws/aws-sdk-js/blob/master/CHANGELOG.md) - [Commits](https://github.com/aws/aws-sdk-js/compare/v2.1320.0...v2.1324.0) --- updated-dependencies: - dependency-name: aws-sdk dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> commit 7e0ecf0346f0dff6db9c0bfc56f459e938888730 Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue Feb 28 03:58:55 2023 +0000 chore: Bump eslint from 8.34.0 to 8.35.0 (#675) Bumps [eslint](https://github.com/eslint/eslint) from 8.34.0 to 8.35.0. - [Release notes](https://github.com/eslint/eslint/releases) - [Changelog](https://github.com/eslint/eslint/blob/main/CHANGELOG.md) - [Commits](https://github.com/eslint/eslint/compare/v8.34.0...v8.35.0) --- updated-dependencies: - dependency-name: eslint dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> commit 567d4149d67f15f52b09796bea6573fc32952783 Merge: 32eb1b4 85ec61b Author: Peter Woodworth <44349620+peterwoodworth@users.noreply.github.com> Date: Tue Feb 21 18:49:30 2023 -0800 Merge pull request #563 from russau/docs/readme-role-update Minor: include a role condition test for AUD in sample CFN template commit 32eb1b4a6692b0a851328355719189b79118a174 Author: Jackson Welsh Date: Tue Feb 21 20:36:17 2023 -0600 feat: document mask-aws-account-id input (#606) Closes aws-actions#304 Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> commit 6ca257764313f33f44efb3df7a3e27b3e67f35c7 Author: GitHub Actions Date: Wed Feb 22 02:34:29 2023 +0000 chore: Update dist commit f1de0761358f06cb7dd99ed4830a5a20cb1b00e5 Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed Feb 22 02:33:40 2023 +0000 chore: Bump aws-sdk from 2.1273.0 to 2.1320.0 (#666) Bumps [aws-sdk](https://github.com/aws/aws-sdk-js) from 2.1273.0 to 2.1320.0. - [Release notes](https://github.com/aws/aws-sdk-js/releases) - [Changelog](https://github.com/aws/aws-sdk-js/blob/master/CHANGELOG.md) - [Commits](https://github.com/aws/aws-sdk-js/compare/v2.1273.0...v2.1320.0) --- updated-dependencies: - dependency-name: aws-sdk dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> commit 077fd1fc8f6f9674559d1d11e69e78d182cb5b9f Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue Feb 21 04:00:04 2023 +0000 chore: Bump jest from 29.4.2 to 29.4.3 (#663) Bumps [jest](https://github.com/facebook/jest/tree/HEAD/packages/jest) from 29.4.2 to 29.4.3. - [Release notes](https://github.com/facebook/jest/releases) - [Changelog](https://github.com/facebook/jest/blob/main/CHANGELOG.md) - [Commits](https://github.com/facebook/jest/commits/v29.4.3/packages/jest) --- updated-dependencies: - dependency-name: jest dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> commit 1416c559c08e8051b3ff6e0739e73ac4bb0d6c57 Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue Feb 14 04:08:31 2023 +0000 chore: Bump eslint from 8.33.0 to 8.34.0 (#653) Bumps [eslint](https://github.com/eslint/eslint) from 8.33.0 to 8.34.0. - [Release notes](https://github.com/eslint/eslint/releases) - [Changelog](https://github.com/eslint/eslint/blob/main/CHANGELOG.md) - [Commits](https://github.com/eslint/eslint/compare/v8.33.0...v8.34.0) --- updated-dependencies: - dependency-name: eslint dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> commit c3f9f9cde3fd8ff601fe136b97d8e42b4ee16a8d Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue Feb 14 04:02:34 2023 +0000 chore: Bump jest from 29.4.1 to 29.4.2 (#655) Bumps [jest](https://github.com/facebook/jest/tree/HEAD/packages/jest) from 29.4.1 to 29.4.2. 
- [Release notes](https://github.com/facebook/jest/releases) - [Changelog](https://github.com/facebook/jest/blob/main/CHANGELOG.md) - [Commits](https://github.com/facebook/jest/commits/v29.4.2/packages/jest) --- updated-dependencies: - dependency-name: jest dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> commit 1a61b71d4c3226b6158c22a9db41b4302ade1351 Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue Feb 14 04:00:54 2023 +0000 chore: Bump axios from 1.3.2 to 1.3.3 (#651) Bumps [axios](https://github.com/axios/axios) from 1.3.2 to 1.3.3. - [Release notes](https://github.com/axios/axios/releases) - [Changelog](https://github.com/axios/axios/blob/v1.x/CHANGELOG.md) - [Commits](https://github.com/axios/axios/compare/v1.3.2...v1.3.3) --- updated-dependencies: - dependency-name: axios dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> commit 07d5781915aab44cbbcbd613a6a8c86b5e9f2f94 Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue Feb 7 03:03:40 2023 +0000 chore: Bump axios from 1.2.6 to 1.3.2 (#648) Bumps [axios](https://github.com/axios/axios) from 1.2.6 to 1.3.2. - [Release notes](https://github.com/axios/axios/releases) - [Changelog](https://github.com/axios/axios/blob/v1.x/CHANGELOG.md) - [Commits](https://github.com/axios/axios/compare/v1.2.6...v1.3.2) --- updated-dependencies: - dependency-name: axios dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> commit 5ed46d9d6a0a7a1132bc73efce74908f5d34dfc8 Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue Jan 31 03:10:42 2023 +0000 chore: Bump eslint from 8.32.0 to 8.33.0 (#645) Bumps [eslint](https://github.com/eslint/eslint) from 8.32.0 to 8.33.0. - [Release notes](https://github.com/eslint/eslint/releases) - [Changelog](https://github.com/eslint/eslint/blob/main/CHANGELOG.md) - [Commits](https://github.com/eslint/eslint/compare/v8.32.0...v8.33.0) --- updated-dependencies: - dependency-name: eslint dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> commit 0d34112acb9bfda01cd826ad191a480823051ecd Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue Jan 31 03:06:27 2023 +0000 chore: Bump jest from 29.3.1 to 29.4.1 (#643) Bumps [jest](https://github.com/facebook/jest/tree/HEAD/packages/jest) from 29.3.1 to 29.4.1. - [Release notes](https://github.com/facebook/jest/releases) - [Changelog](https://github.com/facebook/jest/blob/main/CHANGELOG.md) - [Commits](https://github.com/facebook/jest/commits/v29.4.1/packages/jest) --- updated-dependencies: - dependency-name: jest dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> commit fa45146389f490cc9359e08db2b1d23f674e2dd3 Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue Jan 31 03:05:11 2023 +0000 chore: Bump @vercel/ncc from 0.36.0 to 0.36.1 (#640) Bumps [@vercel/ncc](https://github.com/vercel/ncc) from 0.36.0 to 0.36.1. - [Release notes](https://github.com/vercel/ncc/releases) - [Commits](https://github.com/vercel/ncc/compare/0.36.0...0.36.1) --- updated-dependencies: - dependency-name: "@vercel/ncc" dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> commit a075dc0410b62976e297755bccb43bf800f5caea Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue Jan 31 03:03:44 2023 +0000 chore: Bump axios from 1.2.3 to 1.2.6 (#639) Bumps [axios](https://github.com/axios/axios) from 1.2.3 to 1.2.6. - [Release notes](https://github.com/axios/axios/releases) - [Changelog](https://github.com/axios/axios/blob/v1.x/CHANGELOG.md) - [Commits](https://github.com/axios/axios/compare/v1.2.3...v1.2.6) --- updated-dependencies: - dependency-name: axios dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> commit 3e21ee4ae75b22a2ebdacb5d2b0b875da49dd76f Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue Jan 24 03:04:28 2023 +0000 chore: Bump axios from 1.2.2 to 1.2.3 (#630) Bumps [axios](https://github.com/axios/axios) from 1.2.2 to 1.2.3. - [Release notes](https://github.com/axios/axios/releases) - [Changelog](https://github.com/axios/axios/blob/v1.x/CHANGELOG.md) - [Commits](https://github.com/axios/axios/compare/1.2.2...v1.2.3) --- updated-dependencies: - dependency-name: axios dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> commit 491cd39cf656af766c53f4769f72b3ea6f50fdd2 Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue Jan 17 03:05:35 2023 +0000 chore: Bump eslint from 8.31.0 to 8.32.0 (#625) Bumps [eslint](https://github.com/eslint/eslint) from 8.31.0 to 8.32.0. - [Release notes](https://github.com/eslint/eslint/releases) - [Changelog](https://github.com/eslint/eslint/blob/main/CHANGELOG.md) - [Commits](https://github.com/eslint/eslint/compare/v8.31.0...v8.32.0) --- updated-dependencies: - dependency-name: eslint dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> commit 3654529dc6db288721684d6c54fefa0c1182728f Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue Jan 3 03:06:41 2023 +0000 chore: Bump json5 from 2.2.1 to 2.2.3 (#620) Bumps [json5](https://github.com/json5/json5) from 2.2.1 to 2.2.3. 
- [Release notes](https://github.com/json5/json5/releases) - [Changelog](https://github.com/json5/json5/blob/main/CHANGELOG.md) - [Commits](https://github.com/json5/json5/compare/v2.2.1...v2.2.3) --- updated-dependencies: - dependency-name: json5 dependency-type: indirect ... Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> commit aa5a0310413033fa7579943035155977d62bfd72 Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue Jan 3 03:04:46 2023 +0000 chore: Bump eslint from 8.30.0 to 8.31.0 (#619) Bumps [eslint](https://github.com/eslint/eslint) from 8.30.0 to 8.31.0. - [Release notes](https://github.com/eslint/eslint/releases) - [Changelog](https://github.com/eslint/eslint/blob/main/CHANGELOG.md) - [Commits](https://github.com/eslint/eslint/compare/v8.30.0...v8.31.0) --- updated-dependencies: - dependency-name: eslint dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> commit 55443b3d5b0ed94593f78777e1bdad4156dee318 Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue Jan 3 03:03:28 2023 +0000 chore: Bump axios from 1.2.1 to 1.2.2 (#616) Bumps [axios](https://github.com/axios/axios) from 1.2.1 to 1.2.2. - [Release notes](https://github.com/axios/axios/releases) - [Changelog](https://github.com/axios/axios/blob/v1.x/CHANGELOG.md) - [Commits](https://github.com/axios/axios/compare/v1.2.1...1.2.2) --- updated-dependencies: - dependency-name: axios dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> commit 48f0cb78484f48158b435f99ba5f1e3171322334 Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue Dec 20 03:04:13 2022 +0000 chore: Bump eslint from 8.29.0 to 8.30.0 (#610) Bumps [eslint](https://github.com/eslint/eslint) from 8.29.0 to 8.30.0. - [Release notes](https://github.com/eslint/eslint/releases) - [Changelog](https://github.com/eslint/eslint/blob/main/CHANGELOG.md) - [Commits](https://github.com/eslint/eslint/compare/v8.29.0...v8.30.0) --- updated-dependencies: - dependency-name: eslint dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> commit 90d1b38cb5ab21f5b1c557a8ca6daecc89f2a8df Author: GitHub Actions Date: Tue Dec 13 03:05:43 2022 +0000 chore: Update dist commit 422c79728525dfccd43a29156f027662876488e4 Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue Dec 13 03:04:51 2022 +0000 chore: Bump aws-sdk from 2.1268.0 to 2.1273.0 (#603) Bumps [aws-sdk](https://github.com/aws/aws-sdk-js) from 2.1268.0 to 2.1273.0. 
- [Release notes](https://github.com/aws/aws-sdk-js/releases) - [Changelog](https://github.com/aws/aws-sdk-js/blob/master/CHANGELOG.md) - [Commits](https://github.com/aws/aws-sdk-js/compare/v2.1268.0...v2.1273.0) --- updated-dependencies: - dependency-name: aws-sdk dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> commit 4b942d82b09352f2fe0ab51f6aaf75fef217ed6a Author: GitHub Actions Date: Tue Dec 6 03:10:45 2022 +0000 chore: Update dist commit 7437ed192ed1cd042e4a251b98d5843ee7d125f4 Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue Dec 6 03:09:51 2022 +0000 chore: Bump aws-sdk from 2.1262.0 to 2.1268.0 (#596) Bumps [aws-sdk](https://github.com/aws/aws-sdk-js) from 2.1262.0 to 2.1268.0. - [Release notes](https://github.com/aws/aws-sdk-js/releases) - [Changelog](https://github.com/aws/aws-sdk-js/blob/master/CHANGELOG.md) - [Commits](https://github.com/aws/aws-sdk-js/compare/v2.1262.0...v2.1268.0) --- updated-dependencies: - dependency-name: aws-sdk dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> commit ed4ba44f95cfb81f190b088d9e42738831921e7e Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue Dec 6 03:08:24 2022 +0000 chore: Bump @vercel/ncc from 0.34.0 to 0.36.0 (#594) Bumps [@vercel/ncc](https://github.com/vercel/ncc) from 0.34.0 to 0.36.0. - [Release notes](https://github.com/vercel/ncc/releases) - [Commits](https://github.com/vercel/ncc/compare/0.34.0...0.36.0) --- updated-dependencies: - dependency-name: "@vercel/ncc" dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> commit 10dc6d923a8bb9f4c85bdbf91a2fe0b1456c7787 Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue Dec 6 03:05:08 2022 +0000 chore: Bump axios from 1.2.0 to 1.2.1 (#590) Bumps [axios](https://github.… * chore: tweak readme for v3 --- .editorconfig | 19 + .eslintignore | 2 + .eslintrc.json | 18 - .eslintrc.yml | 168 + .gitattributes | 7 + .github/workflows/check.yml | 16 - .github/workflows/close-stale-issues.yml | 51 - .github/workflows/closed-issue-message.yml | 5 +- .../workflows/handle-stale-discussions.yml | 2 +- .github/workflows/package.yml | 4 +- .github/workflows/pull-request-lint.yml | 27 + .github/workflows/tests-integ.yml | 68 + .github/workflows/tests-unit.yml | 41 + .gitignore | 70 +- .prettierrc.json | 8 + CHANGELOG.md | 19 + LICENSE | 4 +- README.md | 450 +- THIRD-PARTY | 11662 ++- action.yml | 109 +- cleanup.test.js | 51 - dist/cleanup/index.js | 18685 ++++- dist/cleanup/src/CredentialsClient.d.ts | 14 + dist/cleanup/src/assumeRole.d.ts | 15 + dist/cleanup/src/cleanup/index.d.ts | 11 + dist/cleanup/src/helpers.d.ts | 16 + dist/cleanup/src/index.d.ts | 1 + dist/cleanup/test/cleanup.test.d.ts | 1 + dist/cleanup/test/helpers.test.d.ts | 1 + dist/cleanup/test/index.test.d.ts | 1 + dist/index.js | 62865 +++++----------- .../.github/workflows/deploy.yml | 2 +- index.js | 429 - index.test.js | 1021 - jest.config.cjs | 31 + package-lock.json | 18293 ++++- package.json | 80 +- src/CredentialsClient.ts | 70 + src/assumeRole.ts | 169 + cleanup.js => src/cleanup/index.ts | 21 +- src/helpers.ts | 142 + src/index.ts | 182 + test/cleanup.test.ts | 52 + test/helpers.test.ts | 26 + test/index.test.ts | 847 + tsconfig.build.json | 9 + tsconfig.json | 34 + 47 files changed, 67204 insertions(+), 48615 deletions(-) create mode 100644 .editorconfig create mode 100644 .eslintignore delete mode 100644 .eslintrc.json create mode 100644 .eslintrc.yml create mode 100644 .gitattributes delete mode 100644 .github/workflows/check.yml delete mode 100644 .github/workflows/close-stale-issues.yml create mode 100644 .github/workflows/pull-request-lint.yml create mode 100644 .github/workflows/tests-integ.yml create mode 100644 .github/workflows/tests-unit.yml create mode 100644 .prettierrc.json delete mode 100644 cleanup.test.js create mode 100644 dist/cleanup/src/CredentialsClient.d.ts create mode 100644 dist/cleanup/src/assumeRole.d.ts create mode 100644 dist/cleanup/src/cleanup/index.d.ts create mode 100644 dist/cleanup/src/helpers.d.ts create mode 100644 dist/cleanup/src/index.d.ts create mode 100644 dist/cleanup/test/cleanup.test.d.ts create mode 100644 dist/cleanup/test/helpers.test.d.ts create mode 100644 dist/cleanup/test/index.test.d.ts delete mode 100644 index.js delete mode 100644 index.test.js create mode 100644 jest.config.cjs create mode 100644 src/CredentialsClient.ts create mode 100644 src/assumeRole.ts rename cleanup.js => src/cleanup/index.ts (79%) create mode 100644 src/helpers.ts create mode 100644 src/index.ts create mode 100644 test/cleanup.test.ts create mode 100644 test/helpers.test.ts create mode 100644 test/index.test.ts create mode 100644 tsconfig.build.json create mode 100644 tsconfig.json diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 000000000..2f142d166 --- /dev/null +++ b/.editorconfig @@ -0,0 +1,19 
@@ +# EditorConfig is awesome: https://EditorConfig.org + +# top-most EditorConfig file +root = true + +[*] +indent_style = space +indent_size = 2 +end_of_line = lf +charset = utf-8 +trim_trailing_whitespace = true +insert_final_newline = true + +[*.md] +trim_trailing_whitespace = false + +[Makefile] +indent_style = tab +indent_size = 4 diff --git a/.eslintignore b/.eslintignore new file mode 100644 index 000000000..9d0b71a3c --- /dev/null +++ b/.eslintignore @@ -0,0 +1,2 @@ +build +dist diff --git a/.eslintrc.json b/.eslintrc.json deleted file mode 100644 index 5b8a895a8..000000000 --- a/.eslintrc.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "env": { - "commonjs": true, - "es6": true, - "node": true, - "jest": true - }, - "extends": "eslint:recommended", - "globals": { - "Atomics": "readonly", - "SharedArrayBuffer": "readonly" - }, - "parserOptions": { - "ecmaVersion": 2018 - }, - "rules": { - } -} diff --git a/.eslintrc.yml b/.eslintrc.yml new file mode 100644 index 000000000..d5f78842b --- /dev/null +++ b/.eslintrc.yml @@ -0,0 +1,168 @@ +env: + jest: true + node: true +root: true +plugins: + - import + - prettier +parserOptions: + ecmaVersion: 2021 + sourceType: module +extends: + - plugin:prettier/recommended + - prettier +rules: + prettier/prettier: [error] + import/no-extraneous-dependencies: + - error + - devDependencies: + - "**/test/**" + - "**/build-tools/**" + optionalDependencies: false + peerDependencies: true + import/no-unresolved: [error] + import/order: + - warn + - groups: + - builtin + - external + alphabetize: + order: asc + caseInsensitive: true + array-callback-return: [warn] + no-await-in-loop: [warn] + no-constant-binary-expression: [error] + no-constructor-return: [error] + no-duplicate-imports: [error] + no-self-compare: [warn] + no-template-curly-in-string: [error] + no-unmodified-loop-condition: [error] + no-unreachable-loop: [error] + no-unused-private-class-members: [error] + no-use-before-define: [error] + require-atomic-updates: [error] + block-scoped-var: [warn] + camelcase: [warn] + class-methods-use-this: [error] + consistent-return: [warn] + consistent-this: [warn] + default-case-last: [warn] + default-param-last: [warn] + dot-notation: [error] + eqeqeq: [error] + guard-for-in: [warn] + logical-assignment-operators: + - error + - always + - enforceForIfStatements: false + no-array-constructor: [error] + no-bitwise: [error] + no-console: [warn] + no-empty-function: [warn] + no-eval: [error] + no-extra-bind: [error] + no-labels: [error] + no-implicit-globals: [error] + no-invalid-this: [error] + key-spacing: [error] + no-multiple-empty-lines: [error] + no-return-await: [warn] + no-trailing-spaces: [error] + no-lonely-if: [error] + no-nested-ternary: [warn] + no-mixed-operators: [warn] + no-proto: [error] + no-sequences: [error] + no-throw-literal: [error] + no-useless-call: [error] + no-useless-concat: [warn] + no-var: [error] + one-var-declaration-per-line: [error] + prefer-const: [warn] + prefer-arrow-callback: [warn] + prefer-regex-literals: [warn] + prefer-promise-reject-errors: [warn] + prefer-spread: [warn] + prefer-template: [warn] + require-await: [error] +overrides: + - files: + - '**/*.ts' + parser: '@typescript-eslint/parser' + parserOptions: + ecmaVersion: 2021 + sourceType: module + project: ./tsconfig.json + extends: + - plugin:@typescript-eslint/recommended + - plugin:@typescript-eslint/recommended-requiring-type-checking + - plugin:import/typescript + rules: + '@typescript-eslint/array-type': + - warn + - default: array-simple + 
'@typescript-eslint/ban-tslint-comment': [error] + '@typescript-eslint/consistent-indexed-object-style': [warn] + '@typescript-eslint/consistent-type-assertions': [warn] + '@typescript-eslint/prefer-includes': [warn] + dot-notation: [off] + '@typescript-eslint/dot-notation': [error] + '@typescript-eslint/no-explicit-any': [off] + '@typescript-eslint/consistent-type-exports': [warn] + '@typescript-eslint/consistent-type-imports': [warn] + '@typescript-eslint/no-base-to-string': [error] + '@typescript-eslint/no-confusing-non-null-assertion': [warn] + '@typescript-eslint/no-invalid-void-type': [error] + '@typescript-eslint/no-meaningless-void-operator': [warn] + '@typescript-eslint/no-redundant-type-constituents': [warn] + '@typescript-eslint/no-unnecessary-boolean-literal-compare': [warn] + '@typescript-eslint/no-unnecessary-condition': [warn] + '@typescript-eslint/no-unnecessary-qualifier': [warn] + '@typescript-eslint/no-unnecessary-type-arguments': [warn] + '@typescript-eslint/non-nullable-type-assertion-style': [warn] + '@typescript-eslint/prefer-for-of': [error] + '@typescript-eslint/prefer-literal-enum-member': [warn] + '@typescript-eslint/prefer-optional-chain': [warn] + '@typescript-eslint/prefer-readonly': [warn] + '@typescript-eslint/prefer-regexp-exec': [warn] + '@typescript-eslint/prefer-string-starts-ends-with': [warn] + '@typescript-eslint/prefer-ts-expect-error': [error] + '@typescript-eslint/promise-function-async': [warn] + '@typescript-eslint/require-array-sort-compare': [error] + default-param-last: [off] + '@typescript-eslint/default-param-last': [warn] + no-array-constructor: [off] + '@typescript-eslint/no-array-constructor': [error] + no-dupe-class-members: [off] + '@typescript-eslint/no-dupe-class-members': [warn] + no-invalid-this: [off] + '@typescript-eslint/no-invalid-this': [warn] + no-unused-vars: [off] + '@typescript-eslint/no-unused-vars': + - error + - varsIgnorePattern: '^_' + argsIgnorePattern: '^_' + caughtErrorsIgnorePattern: '^_' + '@typescript-eslint/no-non-null-assertion': [off] + '@typescript-eslint/no-require-imports': + - error + no-return-await: [off] + '@typescript-eslint/return-await': [error] + no-shadow: [off] + '@typescript-eslint/no-shadow': [error] + '@typescript-eslint/no-floating-promises': [error] + "@typescript-eslint/member-ordering": + - error + - default: + - public-static-field + - public-static-method + - protected-static-field + - protected-static-method + - private-static-field + - private-static-method + - field + - constructor + - method + no-use-before-define: [off] + '@typescript-eslint/no-use-before-define': [error] + no-duplicate-imports: [off] diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 000000000..8aae98f6d --- /dev/null +++ b/.gitattributes @@ -0,0 +1,7 @@ +/dist/** linguist-generated +/package-lock.json linguist-generated +*.json text eol=lf +*.js text eol=lf +*.jsx text eol=lf +*.ts text eol=lf +*.tsx text eol=lf diff --git a/.github/workflows/check.yml b/.github/workflows/check.yml deleted file mode 100644 index eb50f2570..000000000 --- a/.github/workflows/check.yml +++ /dev/null @@ -1,16 +0,0 @@ -on: - [pull_request] - -name: Check - -jobs: - check: - name: Run Unit Tests - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v3 - - name: Run tests - run: | - npm ci - npm test diff --git a/.github/workflows/close-stale-issues.yml b/.github/workflows/close-stale-issues.yml deleted file mode 100644 index 157ad72e8..000000000 --- a/.github/workflows/close-stale-issues.yml 
+++ /dev/null @@ -1,51 +0,0 @@ -name: "Close Stale Issues" - -# Controls when the action will run. -on: - workflow_dispatch: - schedule: - - cron: "0 */4 * * *" - -jobs: - cleanup: - permissions: - issues: write - contents: read - pull-requests: write - runs-on: ubuntu-latest - name: Stale issue job - steps: - - uses: aws-actions/stale-issue-cleanup@v5 - with: - # Setting messages to an empty string will cause the automation to skip - # that category - ancient-issue-message: This issue has not received any attention in 1 year. If you want to keep this issue open, please leave a comment below and auto-close will be canceled. - stale-issue-message: This issue has not received a response in a while. If you want to keep this issue open, please leave a comment below and auto-close will be canceled. - stale-pr-message: This PR has not received a response in a while. If you want to keep this issue open, please leave a comment below and auto-close will be canceled. - - # These labels are required - stale-issue-label: closing-soon - exempt-issue-labels: no-autoclose - stale-pr-label: closing-soon - exempt-pr-labels: no-autoclose - response-requested-label: response-requested - - # Don't set closed-for-staleness label to skip closing very old issues - # regardless of label - closed-for-staleness-label: closed-for-staleness - - # Issue timing - days-before-stale: 5 - days-before-close: 2 - days-before-ancient: 36500 - - # If you don't want to mark a issue as being ancient based on a - # threshold of "upvotes", you can set this here. An "upvote" is - # the total number of +1, heart, hooray, and rocket reactions - # on an issue. - minimum-upvotes-to-exempt: 5 - - repo-token: ${{ secrets.GITHUB_TOKEN }} - loglevel: DEBUG - # Set dry-run to true to not perform label or close actions. - dry-run: false \ No newline at end of file diff --git a/.github/workflows/closed-issue-message.yml b/.github/workflows/closed-issue-message.yml index a56912e7f..ecffff397 100644 --- a/.github/workflows/closed-issue-message.yml +++ b/.github/workflows/closed-issue-message.yml @@ -13,6 +13,7 @@ jobs: # These inputs are both required repo-token: "${{ secrets.GITHUB_TOKEN }}" message: | - Comments on closed issues are hard for our team to see. + ** Note ** + Comments on closed issues are hard for our team to see. If you need more assistance, please either tag a team member or open a new issue that references this one. - If you wish to keep having a conversation with other community members under this issue feel free to do so. \ No newline at end of file + If you wish to keep having a conversation with other community members under this issue feel free to do so. 
diff --git a/.github/workflows/handle-stale-discussions.yml b/.github/workflows/handle-stale-discussions.yml index 2b89f2da1..8b5bcb1f8 100644 --- a/.github/workflows/handle-stale-discussions.yml +++ b/.github/workflows/handle-stale-discussions.yml @@ -15,4 +15,4 @@ jobs: - name: Stale discussions action uses: aws-github-ops/handle-stale-discussions@v1 env: - GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}} \ No newline at end of file + GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}} diff --git a/.github/workflows/package.yml b/.github/workflows/package.yml index 559634572..f4d20b48c 100644 --- a/.github/workflows/package.yml +++ b/.github/workflows/package.yml @@ -1,4 +1,4 @@ -name: Update dist files on +name: Update dist files on main branch on: push: @@ -27,7 +27,7 @@ jobs: npm test npm run package - name: Configure AWS credentials - uses: aws-actions/configure-aws-credentials@v2 + uses: aws-actions/configure-aws-credentials@v3 with: aws-region: us-west-2 role-to-assume: ${{ secrets.SECRETS_AWS_ROLE_TO_ASSUME }} diff --git a/.github/workflows/pull-request-lint.yml b/.github/workflows/pull-request-lint.yml new file mode 100644 index 000000000..0f90ee7a9 --- /dev/null +++ b/.github/workflows/pull-request-lint.yml @@ -0,0 +1,27 @@ + +name: pull-request-lint +on: + pull_request_target: + types: + - labeled + - opened + - synchronize + - reopened + - ready_for_review + - edited +jobs: + validate: + name: Validate PR title + runs-on: ubuntu-latest + permissions: + pull-requests: write + steps: + - uses: amannn/action-semantic-pull-request@v4.5.0 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + types: |- + feat + fix + chore + requireScope: false diff --git a/.github/workflows/tests-integ.yml b/.github/workflows/tests-integ.yml new file mode 100644 index 000000000..0ed6c2e3e --- /dev/null +++ b/.github/workflows/tests-integ.yml @@ -0,0 +1,68 @@ +name: Run tests + +on: + workflow_dispatch: + +jobs: + integ-oidc: + permissions: + contents: read + id-token: write + strategy: + fail-fast: false + matrix: + os: [[self-hosted, linux-fargate], windows-latest, ubuntu-latest, macos-latest] + node: [14, 16, 18] + name: Run OIDC integ tests + runs-on: ${{ matrix.os }} + timeout-minutes: 30 + steps: + - name: "Checkout repository" + uses: actions/checkout@v3 + - name: Integ test for OIDC + uses: ./ + with: + aws-region: us-west-2 + role-to-assume: ${{ secrets.SECRETS_OIDC_AWS_ROLE_TO_ASSUME }} + role-duration-seconds: 900 + role-session-name: IntegOidcAssumeRole + role-external-id: ${{ secrets.SECRETS_OIDC_AWS_ROLE_EXTERNAL_ID }} + integ-access-keys: + strategy: + fail-fast: false + matrix: + os: [[self-hosted, linux-fargate], windows-latest, ubuntu-latest, macos-latest] + node: [14, 16, 18] + name: Run access key integ tests + runs-on: ${{ matrix.os }} + timeout-minutes: 30 + steps: + - name: "Checkout repository" + uses: actions/checkout@v3 + - name: Integ test for access keys + uses: ./ + with: + aws-region: us-west-2 + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + role-to-assume: ${{ secrets.SECRETS_AWS_ROLE_TO_ASSUME }} + role-session-name: IntegAccessKeysAssumeRole + role-external-id: ${{ secrets.SECRETS_AWS_ROLE_EXTERNAL_ID }} + integ-iam-user: + strategy: + fail-fast: false + matrix: + os: [[self-hosted, linux-fargate], windows-latest, ubuntu-latest, macos-latest] + node: [14, 16, 18] + name: Run IAM User integ tests + runs-on: ${{ matrix.os }} + timeout-minutes: 30 + steps: + - name: "Checkout repository" + uses: actions/checkout@v3 
+ - name: Integ test for IAM user + uses: ./ + with: + aws-region: us-west-2 + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} diff --git a/.github/workflows/tests-unit.yml b/.github/workflows/tests-unit.yml new file mode 100644 index 000000000..2344be50b --- /dev/null +++ b/.github/workflows/tests-unit.yml @@ -0,0 +1,41 @@ +on: + [pull_request] + +name: Run unit tests + +jobs: + unit-test: + strategy: + fail-fast: false + matrix: + os: [windows-latest, ubuntu-latest, macos-latest] + node: [14, 16, 18] + name: Run unit tests + runs-on: ${{ matrix.os }} + timeout-minutes: 5 + steps: + - name: "Checkout repository" + uses: actions/checkout@v3 + - name: "Setup node" + uses: actions/setup-node@v3 + with: + node-version: ${{ matrix.node }} + - name: "Install dependencies" + uses: bahmutov/npm-install@v1 + - name: "Run tests" + run: npm run test --if-present + lint: + name: Ensure code standards + runs-on: ubuntu-latest + timeout-minutes: 5 + steps: + - name: "Checkout repository" + uses: actions/checkout@v3 + - name: "Setup node" + uses: actions/setup-node@v3 + with: + node-version: 16 + - name: "Install dependencies" + uses: bahmutov/npm-install@v1 + - name: "Lint code" + run: npm run lint --if-present diff --git a/.gitignore b/.gitignore index 77aaf7519..bc18168c2 100644 --- a/.gitignore +++ b/.gitignore @@ -1,66 +1,6 @@ -node_modules/ - -# Editors -.vscode -.idea - -# Logs -logs -*.log -npm-debug.log* -yarn-debug.log* -yarn-error.log* - -# Runtime data -pids -*.pid -*.seed -*.pid.lock - -# Directory for instrumented libs generated by jscoverage/JSCover -lib-cov - -# Coverage directory used by tools like istanbul +node_modules coverage - -# nyc test coverage -.nyc_output - -# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) -.grunt - -# Bower dependency directory (https://bower.io/) -bower_components - -# node-waf configuration -.lock-wscript - -# Compiled binary addons (https://nodejs.org/api/addons.html) -build/Release - -# Other Dependency directories -jspm_packages/ - -# TypeScript v1 declaration files -typings/ - -# Optional npm cache directory -.npm - -# Optional eslint cache -.eslintcache - -# Optional REPL history -.node_repl_history - -# Output of 'npm pack' -*.tgz - -# Yarn Integrity file -.yarn-integrity - -# dotenv environment variables file -.env - -# next.js build output -.next +.DS_Store +Thumbs.db +build +test-reports diff --git a/.prettierrc.json b/.prettierrc.json new file mode 100644 index 000000000..97a0bbc8c --- /dev/null +++ b/.prettierrc.json @@ -0,0 +1,8 @@ +{ + "printWidth": 120, + "semi": true, + "singleQuote": true, + "trailingComma": "es5", + "bracketSpacing": true, + "overrides": [] +} diff --git a/CHANGELOG.md b/CHANGELOG.md index db5b9ac50..dc1dc0939 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,25 @@ All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
+## [3.0.0](https://github.com/aws-actions/configure-aws-credentials/compare/v2.2.0...v3.0.0) (2023-08-21) + +### Features +* Can configure `max-retries` and `disable-retry` to modify retry functionality when the assume role call fails +* Set returned credentials as step outputs with `output-credentials` +* Clear AWS related environment variables at the start of the action with `unset-current-credentials` +* Unique role identifier is now printed in the workflow logs + +### Bug Fixes +* Can't use credentials if they contain a special character +* Retry functionality added when generating the JWT fails +* Can now use `webIdentityTokenFile` option +* Branch name validation too strict +* JS SDK v2 deprecation warning in workflow logs + +### Changes to existing functionality +* Default session duration is now 1 hour in all cases (from 6 hours in some cases) +* Account ID will not be masked by default in logs + ## [2.2.0](https://github.com/aws-actions/configure-aws-credentials/compare/v2.1.0...v2.2.0) (2023-05-31) ### Features diff --git a/LICENSE b/LICENSE index 1f7884179..9c22b5d9f 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,4 @@ -MIT License - -Copyright 2019 Amazon.com, Inc. or its affiliates. +Copyright (c) 2019-2022 Amazon.com, Inc. or its affiliates Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/README.md b/README.md index adaebfb4f..d6266a294 100644 --- a/README.md +++ b/README.md @@ -5,43 +5,76 @@ and exports environment variables for your other Actions to use. Environment variable exports are detected by both the AWS SDKs and the AWS CLI for AWS API calls. -### Recent updates +--- -**GitHub OIDC Changes** +### News -In #[357](https://github.com/aws-actions/configure-aws-credentials/issues/357), we -observed that GitHub recently started offering one of several intermediate OIDC -endpoint thumbprints. Because IAM requires statically configured endpoint -thumbprints, AWS customers that had only one thumbprint configured could see -intermittent authentication failures. **As of July 6, 2023, AWS has made a change to -IAM that will no longer require any particular certificate thumbprint for -tokens.actions.githubusercontent.com**, which is the GitHub OIDC endpoint. Instead, -AWS secures communication with GitHub OIDC using our library of trusted CAs rather -than using a certificate thumbprint to verify the server certificate. The IAM APIs -still require that a thumbprint is configured, but those thumbprints will be ignored -when authenticating tokens.actions.githubusercontent.com. +We have recently released `v3` of Configure AWS Credentials! With this new +release we have migrated the code to TypeScript, and have also migrated away +from using `v2` of the JavaScript AWS SDK. This should eliminate the warning you +have seen in your workflow logs about `v2` deprecation. -GitHub Enterprise Server customers use a different endpoint so they are not affected by -this change. +In addition to the refactored codebase, we have also introduced some changes to +existing functionality, added some new features, and fixed some bugs. These +changes should be backwards compatible with your existing workflows. -*Original message:* -There are now [two possible intermediary certificates](https://github.blog/changelog/2023-06-27-github-actions-update-on-oidc-integration-with-aws/) for the Actions SSL certificate. 
Previously, the certificate with the thumbprint `6938fd4d98bab03faadb97b34396831e3780aea1` was guaranteed to return. Now, the certificate with the thumbprint `1c58a3a8518e8759bf075b76b750d4f2df264fcd` can also be returned, so you will need to [update your identity provider](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_providers_create_oidc.html) with this additional new thumbprint. +**Notable changes to existing functionality** + +- By default, the assumed role credentials will only be valid for one hour in +_all_ use cases. This is changed from 6 hours in `v2`. You can adjust this value +with the `role-duration-seconds` input. +- By default, your account ID will not be masked in workflow logs. This was +changed from being masked by default in the previous version. AWS does not consider +account IDs to be sensitive information, so this change reflects that stance. You +can revert to the old default and mask your account ID in workflow logs by +setting the `mask-aws-account-id` input to `true`. + +**New features** + +- You can now configure retry settings in case your STS call fails. By default, +we retry with exponential backoff twelve times. You can disable this behavior +altogether by setting the `disable-retry` input to `true`, or you can configure +the number of times the action will retry with the `retry-max-attempts` input. +- You can now set the returned credentials as action step outputs. To do this, +you can set the `output-credentials` input to `true`. +- There's now an option to clear the AWS-related environment variables at the +start of the action. Clearing these variables is often a workaround for +problems, so enabling this can be helpful if existing credentials or environment +variables are interfering with the action. You can enable this by setting the +`unset-current-credentials` input to `true`. A combined sketch of these new inputs is shown after the overview below. + +**Bug fixes** + +You can find a list of bugs that have been fixed in v3 in the +[changelog](./CHANGELOG.md). + +--- ### Table of Contents -- [Usage](#usage) -- [Credentials](#credentials) -- [Assuming a Role](#assuming-a-role) - + [Session tagging](#session-tagging) +- [Overview](#overview) +- [Security recommendations](#security-recommendations) +- [Using this action](#using-this-action) + + [Credential Lifetime](#credential-lifetime) + + [External ID](#external-id) + + [Session tagging](#session-tagging-and-name) + [Sample IAM Role Permissions](#sample-iam-role-cloudformation-template) + + [Misc](#misc) +- [OIDC](#OIDC) + + [Audience](#audience) + + [Sample IAM OIDC CloudFormation Template](#sample-iam-oidc-cloudformation-template) + + [Claims and scoping permissions](#claims-and-scoping-permissions) + + [Further info](#further-info) - [Self-Hosted Runners](#self-hosted-runners) + [Proxy Configuration](#proxy-configuration) + + [Use with the AWS CLI](#use-with-the-aws-cli) +- [Examples](#examples) - [License Summary](#license-summary) - [Security Disclosures](#security-disclosures) -## Usage -We support four methods for fetching credentials from AWS, but we recommend that +## Overview +We support five methods for fetching credentials from AWS, but we recommend that you use GitHub's OIDC provider in conjunction with a configured AWS IAM Identity Provider endpoint.
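As referenced in the release notes above, here is a minimal sketch that combines the new and changed v3 inputs in a single step. The input names come from this README; the role ARN, region, and specific values are illustrative placeholders, and you would normally set only the inputs you need:

```yaml
      # Sketch only: combines the v3 inputs discussed in the release notes above.
      - name: Configure AWS Credentials
        uses: aws-actions/configure-aws-credentials@v3
        with:
          aws-region: us-east-2
          role-to-assume: arn:aws:iam::123456789100:role/my-github-actions-role
          role-duration-seconds: 3600       # sessions now default to 1 hour in all cases
          mask-aws-account-id: true         # account IDs are no longer masked by default
          retry-max-attempts: 5             # default is 12 retries with exponential backoff
          disable-retry: false              # set to true to turn retries off entirely
          output-credentials: true          # expose the fetched credentials as step outputs
          unset-current-credentials: true   # clear AWS environment variables before configuring
```

Each of these inputs is described in more detail later in this README.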
@@ -49,14 +82,14 @@ To do that, you would add the following step to your workflow: ```yaml - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v2 + uses: aws-actions/configure-aws-credentials@v3 with: role-to-assume: arn:aws:iam::123456789100:role/my-github-actions-role aws-region: us-east-2 ``` -This will cause the action to perform an `AssumeRoleWithWebIdentity` call and -return temporary security credentials for use by other actions. In order for -this to work, you'll need to preconfigure the IAM IdP in your AWS account +This will cause the action to perform an [`AssumeRoleWithWebIdentity`](https://docs.aws.amazon.com/STS/latest/APIReference/API_AssumeRoleWithWebIdentity.html) call and +return temporary security credentials for use by other steps in your workflow. In order for +this to work, you'll need to preconfigure the IAM Identity Provider in your AWS account (see [Assuming a Role](#assuming-a-role) for details). You can use this action with the AWS CLI available in @@ -78,7 +111,7 @@ jobs: - name: Checkout uses: actions/checkout@v3 - name: Configure AWS credentials from Test account - uses: aws-actions/configure-aws-credentials@v2 + uses: aws-actions/configure-aws-credentials@v3 with: role-to-assume: arn:aws:iam::111111111111:role/my-github-actions-role-test aws-region: us-east-1 @@ -86,7 +119,7 @@ jobs: run: | aws s3 sync . s3://my-s3-test-website-bucket - name: Configure AWS credentials from Production account - uses: aws-actions/configure-aws-credentials@v2 + uses: aws-actions/configure-aws-credentials@v3 with: role-to-assume: arn:aws:iam::222222222222:role/my-github-actions-role-prod aws-region: us-west-2 @@ -98,7 +131,13 @@ jobs: See [action.yml](action.yml) for the full documentation for this action's inputs and outputs. -## Credentials +### Note about GHES + +Some of this documentation may be inaccurate if you are using GHES (GitHub Enterprise Server); please be sure to review the GitHub documentation when relevant. + +For example, the URL that the OIDC JWT is issued from is different from the usual `tokens.actions.githubusercontent.com`, and will be unique to your enterprise server. As a result, you will need to configure this differently when you create the Identity Provider. + +## Security recommendations We recommend following [Amazon IAM best practices](https://docs.aws.amazon.com/IAM/latest/UserGuide/best-practices.html) @@ -109,15 +148,18 @@ for the AWS credentials used in GitHub Actions workflows, including: GitHub Actions workflows. * [Monitor the activity](https://docs.aws.amazon.com/IAM/latest/UserGuide/best-practices.html#keep-a-log) of the credentials used in GitHub Actions workflows. -## Assuming a Role -There are four different supported ways to retrieve credentials. We recommend -using [GitHub's OIDC provider](https://docs.github.com/en/actions/deployment/security-hardening-your-deployments/configuring-openid-connect-in-amazon-web-services) -to get short-lived credentials needed for your actions. Specifying -`role-to-assume` **without** providing an `aws-access-key-id` or a -`web-identity-token-file`, or setting `role-chaining`, will signal to the action that you wish to use the -OIDC provider. If `role-chaining` is `true`, existing credentials in the environment will be used to assume `role-to-assume`.
+## Using this action +There are five different supported ways to retrieve credentials: + +- Using GitHub's OIDC provider (`AssumeRoleWithWebIdentity`) +- Proceeding as an IAM user (No STS call is made) +- Using access keys as action input (`AssumeRole`) +- Using a WebIdentity Token File (`AssumeRoleWithWebIdentity`) +- Using existing credentials in your runner (`AssumeRole`) + +We recommend using [GitHub's OIDC provider](https://docs.github.com/en/actions/deployment/security-hardening-your-deployments/configuring-openid-connect-in-amazon-web-services) to get short-lived AWS credentials needed for your actions. See [OIDC](#OIDC) for more information on how to set up your AWS account to assume a role with OIDC. -The following table describes which identity is used based on which values are supplied to the Action: +The following table describes which method is used based on which values are supplied to the Action: | **Identity Used** | `aws-access-key-id` | `role-to-assume` | `web-identity-token-file` | `role-chaining` | | --------------------------------------------------------------- | ------------------- | ---------------- | ------------------------- | - | @@ -128,97 +170,135 @@ The following table describes which identity is used based on which values are s | Assume Role using existing credentials | | ✔ | | ✔ | ### Credential Lifetime -The default session duration is **1 hour** when using the OIDC provider to -directly assume an IAM Role or when an `aws-session-token` is directly provided. -The default session duration is **6 hours** when using an IAM User to assume an -IAM Role (by providing an `aws-access-key-id`, `aws-secret-access-key`, and a -`role-to-assume`) . +The default session duration is **1 hour**. If you would like to adjust this, you can pass a duration to `role-duration-seconds`, but the duration cannot exceed the maximum that was defined when the IAM Role was created. -The default session name is GitHubActions, and you can modify it by specifying the desired name in `role-session-name`. -The default audience is `sts.amazonaws.com` which you can replace by specifying the desired audience name in `audience`. -### Examples + +### External ID +If your role requires an external ID to assume, you can provide the external ID with the `role-external-id` input. + +### Session tagging and name +The default session name is "GitHubActions", and you can modify it by specifying the desired name in `role-session-name`. +The session will be tagged with the following tags: (`GITHUB_` environment variable definitions can be +[found here](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/using-environment-variables#default-environment-variables)) + +| Key | Value | +| ---------- | ----------------- | +| GitHub | "Actions" | +| Repository | GITHUB_REPOSITORY | +| Workflow | GITHUB_WORKFLOW | +| Action | GITHUB_ACTION | +| Actor | GITHUB_ACTOR | +| Branch | GITHUB_REF | +| Commit | GITHUB_SHA | -#### AssumeRoleWithWebIdentity (recommended) +_Note: all tag values must conform to +[the requirements](https://docs.aws.amazon.com/STS/latest/APIReference/API_Tag.html). +Particularly, `GITHUB_WORKFLOW` will be truncated if it's too long. If +`GITHUB_ACTOR` or `GITHUB_WORKFLOW` contain invalid characters, the characters +will be replaced with an '*'._ + +The action will use session tagging by default during role assumption, unless you are assuming a role with a WebIdentity.
+For WebIdentity role assumption, the session tags have to be included +in the encoded WebIdentity token. This means that tags can only be supplied by +the OIDC provider, and they cannot be set during the AssumeRoleWithWebIdentity API call +within the Action. See [issue 419](https://github.com/aws-actions/configure-aws-credentials/issues/419) for more info. + +You can skip this session tagging by providing +`role-skip-session-tagging` as true in the action's inputs: ```yaml - - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v2 + uses: aws-actions/configure-aws-credentials@v3 with: - aws-region: us-east-2 - role-to-assume: arn:aws:iam::123456789100:role/my-github-actions-role - role-session-name: MySessionName + role-skip-session-tagging: true ``` -In this example, the Action will load the OIDC token from the GitHub-provided environment variable and use it to assume the role `arn:aws:iam::123456789100:role/my-github-actions-role` with the session name `MySessionName`. -#### AssumeRole with static IAM credentials in repository secrets +### Session policies + +#### Inline session policies +An IAM policy in stringified JSON format that you want to use as an inline session policy. +Depending on preferences, the JSON could be written on a single line like this: ```yaml - - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v2 + uses: aws-actions/configure-aws-credentials@v3 with: - aws-region: us-east-2 - role-to-assume: arn:aws:iam::123456789100:role/my-github-actions-role - role-session-name: MySessionName - - name: Configure other AWS Credentials - uses: aws-actions/configure-aws-credentials@v2 + inline-session-policy: '{"Version":"2012-10-17","Statement":[{"Sid":"Stmt1","Effect":"Allow","Action":"s3:List*","Resource":"*"}]}' +``` +Or we can have a nicely formatted JSON as well: +```yaml + uses: aws-actions/configure-aws-credentials@v3 with: - aws-region: us-east-2 - role-to-assume: arn:aws:iam::987654321000:role/my-second-role - role-session-name: MySessionName - role-chaining: true + inline-session-policy: >- + { + "Version": "2012-10-17", + "Statement": [ + { + "Sid":"Stmt1", + "Effect":"Allow", + "Action":"s3:List*", + "Resource":"*" + } + ] + } ``` -In this two-step example, the first step will use OIDC to assume the role `arn:aws:iam::123456789100:role/my-github-actions-role` just as in the prior example. Following that, a second step will use this role to assume a different role, `arn:aws:iam::987654321000:role/my-second-role`. +#### Managed session policies +The Amazon Resource Names (ARNs) of the IAM managed policies that you want to use as managed session policies. +The policies must exist in the same account as the role. You can pass a single managed policy like this: ```yaml - - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v2 + uses: aws-actions/configure-aws-credentials@v3 with: - aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - aws-region: us-east-2 - role-to-assume: ${{ secrets.AWS_ROLE_TO_ASSUME }} - role-external-id: ${{ secrets.AWS_ROLE_EXTERNAL_ID }} - role-duration-seconds: 1200 - role-session-name: MySessionName + managed-session-policies: arn:aws:iam::aws:policy/AmazonS3ReadOnlyAccess ``` -In this example, the secret `AWS_ROLE_TO_ASSUME` contains a string like `arn:aws:iam::123456789100:role/my-github-actions-role`.
To assume a role in the same account as the static credentials, you can simply specify the role name, like `role-to-assume: my-github-actions-role`. - -#### AssumeRoleWithWebIdentity using a custom audience +And we can pass multiple managed policies like this: ```yaml - - name: Configure AWS Credentials for Beta Customers - uses: aws-actions/configure-aws-credentials@v2 + uses: aws-actions/configure-aws-credentials@v3 with: - audience: beta-customers - aws-region: us-east-3 - role-to-assume: arn:aws:iam::123456789100:role/my-github-actions-role - role-session-name: MySessionName + managed-session-policies: | + arn:aws:iam::aws:policy/AmazonS3ReadOnlyAccess + arn:aws:iam::aws:policy/AmazonS3OutpostsReadOnlyAccess ``` -In this example, the audience has been changed from the default to use a different audience name `beta-customers`. This can help ensure that the role can only affect those AWS accounts whose GitHub OIDC providers have explicitly opted in to the `beta-customers` label. -Changing the default audience may be necessary when using non-default [AWS partitions](https://docs.aws.amazon.com/general/latest/gr/aws-arns-and-namespaces.html). +### Misc + +#### Adjust the retry mechanism +You can now configure retry settings for when the STS call fails. By default, we retry with exponential backoff `12` times. You can disable this behavior altogether by setting the `disable-retry` input to `true`, or you can configure the number of times it retries with the `retry-max-attempts` input. + +#### Mask account ID +Your account ID is not masked by default in workflow logs since it's not considered sensitive information. However, you can set the `mask-aws-account-id` input to `true` to mask your account ID in workflow logs if desired. + +#### Unset current credentials +Sometimes, existing credentials in your runner can get in the way of the intended outcome, and the recommended solution is to include another step in your workflow which unsets the environment variables set by this action. Now if you set the `unset-current-credentials` input to `true`, the workaround is made easier. + +## OIDC + +We recommend using [GitHub's OIDC provider](https://docs.github.com/en/actions/deployment/security-hardening-your-deployments/configuring-openid-connect-in-amazon-web-services) to get short-lived AWS credentials needed for your actions. When using OIDC, this action will create a JWT unique to the workflow run, and it will use this JWT to assume the role. For this action to create the JWT, it is required for your workflow to have the `id-token: write` permission: -#### AssumeRoleWithWebIdentity and disable secure Action outputs ```yaml - - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v2 + permissions: + id-token: write + contents: read +``` + +### Audience + +When the JWT is created, an audience needs to be specified. By default, the audience is `sts.amazonaws.com` and this will work for most cases. Changing the default audience may be necessary when using non-default AWS partitions. You can specify the audience through the `audience` input: + +```yaml + - name: Configure AWS Credentials for China region audience + uses: aws-actions/configure-aws-credentials@v3 with: - aws-region: us-east-2 + audience: sts.amazonaws.com.cn + aws-region: us-east-3 role-to-assume: arn:aws:iam::123456789100:role/my-github-actions-role - role-session-name: MySessionName - mask-aws-account-id: false ``` -In this example, account ID masking has been disabled.
By default, the AWS -account ID will be obscured in the action's output. This may be helpful when -debugging action failures. -## Sample IAM OIDC CloudFormation Template -If you choose to use GitHub's OIDC provider, you must first set up federation +### Sample IAM OIDC CloudFormation Template +To use GitHub's OIDC provider, you must first set up federation with the provider in as an IAM IdP. The GitHub OIDC provider only needs to be created once per account (i.e. multiple IAM Roles that can be assumed by the GitHub's OIDC can share a single OIDC Provider). Note that the thumbprint has been set to all F's because the thumbprint is not -used when authenticating tokens.actions.githubusercontent.com. Instead, IAM +used when authenticating `tokens.actions.githubusercontent.com`. Instead, IAM uses its library of trusted CAs to authenticate. However, this value is still required by the API. @@ -282,16 +362,14 @@ Outputs: Value: !GetAtt Role.Arn ``` -To align with the Amazon IAM best practice of -[granting least privilege](https://docs.aws.amazon.com/IAM/latest/UserGuide/best-practices.html#grant-least-privilege), the assume role policy document should contain a -[`Condition`](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_elements_condition.html) that specifies a subject allowed to assume the role. Without a subject -condition, any GitHub user or repository could potentially assume the role. The -subject can be scoped to a GitHub organization and repository as shown in the -CloudFormation template. Additional claim conditions can be added for higher -specificity as explained in the -[GitHub documentation](https://docs.github.com/en/actions/deployment/security-hardening-your-deployments/about-security-hardening-with-openid-connect). -Due to implementation details, not every OIDC claim is presently supported by -IAM. +### Claims and scoping permissions +To align with the Amazon IAM best practice of [granting least privilege](https://docs.aws.amazon.com/IAM/latest/UserGuide/best-practices.html#grant-least-privilege), the assume role policy document should contain a [`Condition`](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_elements_condition.html) that specifies a subject (`sub`) allowed to assume the role. [GitHub also recommends](https://docs.github.com/en/actions/deployment/security-hardening-your-deployments/about-security-hardening-with-openid-connect#defining-trust-conditions-on-cloud-roles-using-oidc-claims) filtering for the correct audience (`aud`). See [AWS IAM documentation](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_iam-condition-keys.html#condition-keys-wif) on which claims you can filter for in your trust policies. + +Without a subject (`sub`) condition, any GitHub user or repository could potentially assume the role. The subject can be scoped to a GitHub organization and repository as shown in the CloudFormation template. However, scoping it down to your org and repo may cause the role assumption to fail in some cases. See [Example subject claims](https://docs.github.com/en/actions/deployment/security-hardening-your-deployments/about-security-hardening-with-openid-connect#example-subject-claims) for specific details on what the subject value will be depending on your workflow. 
You can also [customize your subject claim](https://docs.github.com/en/actions/deployment/security-hardening-your-deployments/about-security-hardening-with-openid-connect#customizing-the-token-claims) if you want full control over the information you can filter for in your trust policy. If you aren't sure what your subject (`sub`) key is, you can add the [`actions-oidc-debugger`](https://github.com/github/actions-oidc-debugger) action to your workflow to see the value of the subject (`sub`) key, as well as other claims. + +Additional claim conditions can be added for higher specificity as explained in the [GitHub documentation](https://docs.github.com/en/actions/deployment/security-hardening-your-deployments/about-security-hardening-with-openid-connect). Due to implementation details, not every OIDC claim is presently supported by IAM. + +### Further info For further information on OIDC and GitHub Actions, please see: @@ -301,82 +379,6 @@ For further information on OIDC and GitHub Actions, please see: * [GitHub docs: Configuring OpenID Connect in Amazon Web Services](https://docs.github.com/en/actions/deployment/security-hardening-your-deployments/configuring-openid-connect-in-amazon-web-services) * [GitHub changelog: GitHub Actions: Secure cloud deployments with OpenID Connect](https://github.blog/changelog/2021-10-27-github-actions-secure-cloud-deployments-with-openid-connect/) -### Session tagging -The session will have the name "GitHubActions" and be tagged with the following -tags: (`GITHUB_` environment variable definitions can be -[found here](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/using-environment-variables#default-environment-variables)) - -| Key | Value | -| ---------- | ----------------- | -| GitHub | "Actions" | -| Repository | GITHUB_REPOSITORY | -| Workflow | GITHUB_WORKFLOW | -| Action | GITHUB_ACTION | -| Actor | GITHUB_ACTOR | -| Branch | GITHUB_REF | -| Commit | GITHUB_SHA | - -_Note: all tag values must conform to -[the requirements](https://docs.aws.amazon.com/STS/latest/APIReference/API_Tag.html). -Particularly, `GITHUB_WORKFLOW` will be truncated if it's too long. If -`GITHUB_ACTOR` or `GITHUB_WORKFLOW` contain invalid characters, the characters -will be replaced with an '*'._ - -The action will use session tagging by default during role assumption. -Note that for WebIdentity role assumption, the session tags have to be included -in the encoded WebIdentity token. This means that Tags can only be supplied by -the OIDC provider and not set during the AssumeRoleWithWebIdentity API call -within the Action. You can skip this session tagging by providing -`role-skip-session-tagging` as true in the action's inputs: -```yaml - uses: aws-actions/configure-aws-credentials@v2 - with: - role-skip-session-tagging: true -``` - -### Inline session policy -An IAM policy in stringified JSON format that you want to use as an inline session policy. 
-Depending on preferences, the JSON could be written on a single line like this: -```yaml - uses: aws-actions/configure-aws-credentials@v2 - with: - inline-session-policy: '{"Version":"2012-10-17","Statement":[{"Sid":"Stmt1","Effect":"Allow","Action":"s3:List*","Resource":"*"}]}' -``` -Or we can have a nicely formatted JSON as well: -```yaml - uses: aws-actions/configure-aws-credentials@v2 - with: - inline-session-policy: >- - { - "Version": "2012-10-17", - "Statement": [ - { - "Sid":"Stmt1", - "Effect":"Allow", - "Action":"s3:List*", - "Resource":"*" - } - ] - } -``` - -### Managed session policies -The Amazon Resource Names (ARNs) of the IAM managed policies that you want to use as managed session policies. -The policies must exist in the same account as the role. You can pass a single managed policy like this: -```yaml - uses: aws-actions/configure-aws-credentials@v2 - with: - managed-session-policies: arn:aws:iam::aws:policy/AmazonS3ReadOnlyAccess -``` -And we can pass multiple managed policies likes this: -```yaml - uses: aws-actions/configure-aws-credentials@v2 - with: - managed-session-policies: | - arn:aws:iam::aws:policy/AmazonS3ReadOnlyAccess - arn:aws:iam::aws:policy/AmazonS3OutpostsReadOnlyAccess -``` - ## Self-Hosted Runners If you run your GitHub Actions in a @@ -388,13 +390,13 @@ authenticate on your runner, this Action will as well. If no access key credentials are given in the action inputs, this action will use credentials from the runner environment using the -[default methods for the AWS SDK for Javascript](https://docs.aws.amazon.com/sdk-for-javascript/v2/developer-guide/setting-credentials-node.html). +[default methods for the AWS SDK for Javascript](https://docs.aws.amazon.com/sdk-for-javascript/v3/developer-guide/setting-credentials-node.html). You can use this action to simply configure the region and account ID in the environment, and then use the runner's credentials for all AWS API calls made by your Actions workflow: ```yaml -uses: aws-actions/configure-aws-credentials@v2 +uses: aws-actions/configure-aws-credentials@v3 with: aws-region: us-east-2 ``` @@ -404,7 +406,7 @@ APIs called by your Actions workflow. Or, you can use this action to assume a role, and then use the role credentials for all AWS API calls made by your Actions workflow: ```yaml -uses: aws-actions/configure-aws-credentials@v2 +uses: aws-actions/configure-aws-credentials@v3 with: aws-region: us-east-2 role-to-assume: my-github-actions-role @@ -427,7 +429,7 @@ environment. Manually configured proxy: ```yaml -uses: aws-actions/configure-aws-credentials@v2 +uses: aws-actions/configure-aws-credentials@v3 with: aws-region: us-east-2 role-to-assume: my-github-actions-role @@ -451,6 +453,78 @@ to executing `aws` commands need to have the AWS CLI if it's not already present. Most [GitHub hosted runner environments](https://github.com/actions/virtual-environments) should include the AWS CLI by default. + +## Examples + +### AssumeRoleWithWebIdentity (recommended) +```yaml + - name: Configure AWS Credentials + uses: aws-actions/configure-aws-credentials@v3 + with: + aws-region: us-east-2 + role-to-assume: arn:aws:iam::123456789100:role/my-github-actions-role + role-session-name: MySessionName +``` +In this example, the Action will load the OIDC token from the GitHub-provided environment variable and use it to assume the role `arn:aws:iam::123456789100:role/my-github-actions-role` with the session name `MySessionName`. 
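For completeness, a minimal sketch of a full job wrapping the recommended step above might look like the following; it simply adds the `permissions` block required for OIDC (see the OIDC section) and an illustrative identity check, with the job and step names being placeholders:

```yaml
# Sketch: the recommended OIDC example above, embedded in a complete job.
jobs:
  deploy:
    runs-on: ubuntu-latest
    permissions:
      id-token: write   # required so the action can request the OIDC JWT
      contents: read    # required by actions/checkout
    steps:
      - name: Checkout
        uses: actions/checkout@v3
      - name: Configure AWS Credentials
        uses: aws-actions/configure-aws-credentials@v3
        with:
          aws-region: us-east-2
          role-to-assume: arn:aws:iam::123456789100:role/my-github-actions-role
          role-session-name: MySessionName
      - name: Verify the assumed identity   # illustrative check
        run: aws sts get-caller-identity
```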
+ +### AssumeRole with role previously assumed by action in same workflow +```yaml + - name: Configure AWS Credentials + uses: aws-actions/configure-aws-credentials@v3 + with: + aws-region: us-east-2 + role-to-assume: arn:aws:iam::123456789100:role/my-github-actions-role + role-session-name: MySessionName + - name: Configure other AWS Credentials + uses: aws-actions/configure-aws-credentials@v3 + with: + aws-region: us-east-2 + role-to-assume: arn:aws:iam::987654321000:role/my-second-role + role-session-name: MySessionName + role-chaining: true +``` +In this two-step example, the first step will use OIDC to assume the role `arn:aws:iam::123456789100:role/my-github-actions-role` just as in the prior example. Following that, a second step will use this role to assume a different role, `arn:aws:iam::987654321000:role/my-second-role`. + +### AssumeRole with static IAM credentials in repository secrets +```yaml + - name: Configure AWS Credentials + uses: aws-actions/configure-aws-credentials@v3 + with: + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + aws-region: us-east-2 + role-to-assume: ${{ secrets.AWS_ROLE_TO_ASSUME }} + role-external-id: ${{ secrets.AWS_ROLE_EXTERNAL_ID }} + role-duration-seconds: 1200 + role-session-name: MySessionName +``` +In this example, the secret `AWS_ROLE_TO_ASSUME` contains a string like `arn:aws:iam::123456789100:role/my-github-actions-role`. To assume a role in the same account as the static credentials, you can simply specify the role name, like `role-to-assume: my-github-actions-role`. + +### Retrieving credentials from step output, AssumeRole with temporary credentials +```yaml + - name: Configure AWS Credentials 1 + id: creds + uses: aws-actions/configure-aws-credentials@v3 + with: + aws-region: us-east-2 + role-to-assume: arn:aws:iam::123456789100:role/my-github-actions-role + output-credentials: true + - name: get caller identity 1 + run: | + aws sts get-caller-identity + - name: Configure AWS Credentials 2 + uses: aws-actions/configure-aws-credentials@v3 + with: + aws-region: us-east-2 + aws-access-key-id: ${{ steps.creds.outputs.aws-access-key-id }} + aws-secret-access-key: ${{ steps.creds.outputs.aws-secret-access-key }} + aws-session-token: ${{ steps.creds.outputs.aws-session-token }} + role-to-assume: arn:aws:iam::123456789100:role/my-other-github-actions-role + - name: get caller identity2 + run: | + aws sts get-caller-identity +``` +This example shows that you can reference the fetched credentials as outputs if `output-credentials` is set to true. This example also shows that you can use the `aws-session-token` input in a situation where session tokens are fetched and passed to this action. ## License Summary This code is made available under the MIT license. diff --git a/THIRD-PARTY b/THIRD-PARTY index 24c6a4ba8..31f1c38be 100644 --- a/THIRD-PARTY +++ b/THIRD-PARTY @@ -1,223 +1,11395 @@ -** AWS SDK for JavaScript; version 2.562.0 -- https://github.com/aws/aws-sdk-js -Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+@actions/core +MIT +The MIT License (MIT) -Apache License - -Version 2.0, January 2004 +Copyright 2019 GitHub + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +@actions/http-client +MIT +Actions Http Client for Node.js + +Copyright (c) GitHub, Inc. + +All rights reserved. + +MIT License + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and +associated documentation files (the "Software"), to deal in the Software without restriction, +including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT +LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + +@aws-crypto/crc32 +Apache-2.0 + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + +@aws-crypto/util +Apache-2.0 + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + +@aws-sdk/client-sso +Apache-2.0 + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ + +@aws-sdk/client-sts +Apache-2.0 + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + +@aws-sdk/credential-provider-env +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +@aws-sdk/credential-provider-ini +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +@aws-sdk/credential-provider-node +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +@aws-sdk/credential-provider-process +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +@aws-sdk/credential-provider-sso +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ +@aws-sdk/credential-provider-web-identity +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +@aws-sdk/middleware-host-header +Apache-2.0 + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + +@aws-sdk/middleware-logger +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +@aws-sdk/middleware-recursion-detection +Apache-2.0 + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + +@aws-sdk/middleware-sdk-sts +Apache-2.0 + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + +@aws-sdk/middleware-signing +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ +@aws-sdk/middleware-user-agent +Apache-2.0 + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + +@aws-sdk/node-http-handler +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +@aws-sdk/token-providers +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +@aws-sdk/types +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +@aws-sdk/util-endpoints +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +@aws-sdk/util-user-agent-node +Apache-2.0 + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ + +@aws-sdk/util-utf8-browser +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +@smithy/config-resolver +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +@smithy/credential-provider-imds +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +@smithy/eventstream-codec +Apache-2.0 + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + +@smithy/hash-node +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +@smithy/is-array-buffer +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ +@smithy/middleware-content-length +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +@smithy/middleware-endpoint +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +@smithy/middleware-retry +Apache-2.0 + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + +@smithy/middleware-serde +Apache-2.0 + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + +@smithy/middleware-stack +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +@smithy/node-config-provider +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ +@smithy/node-http-handler +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +@smithy/property-provider +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +@smithy/protocol-http +Apache-2.0 + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + +@smithy/querystring-builder +Apache-2.0 + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + +@smithy/querystring-parser +Apache-2.0 + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + +@smithy/service-error-classification +Apache-2.0 + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ + +@smithy/shared-ini-file-loader +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +@smithy/signature-v4 +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +@smithy/smithy-client +Apache-2.0 + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + +@smithy/types +Apache-2.0 + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + +@smithy/url-parser +Apache-2.0 + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + +@smithy/util-base64 +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ +@smithy/util-body-length-node +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +@smithy/util-buffer-from +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +@smithy/util-config-provider +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +@smithy/util-defaults-mode-node +Apache-2.0 + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + +@smithy/util-hex-encoding +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +@smithy/util-middleware +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. -http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND -DISTRIBUTION + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. 
+ Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +@smithy/util-retry +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. - "License" shall mean the terms and conditions for use, reproduction, and - distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by the - copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all other - entities that control, are controlled by, or are under common control - with that entity. For the purposes of this definition, "control" means - (i) the power, direct or indirect, to cause the direction or management - of such entity, whether by contract or otherwise, or (ii) ownership of - fifty percent (50%) or more of the outstanding shares, or (iii) - beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity exercising - permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation source, - and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but not limited - to compiled object code, generated documentation, and conversions to - other media types. - - "Work" shall mean the work of authorship, whether in Source or Object - form, made available under the License, as indicated by a copyright - notice that is included in or attached to the work (an example is - provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object form, - that is based on (or derived from) the Work and for which the editorial - revisions, annotations, elaborations, or other modifications represent, - as a whole, an original work of authorship. For the purposes of this - License, Derivative Works shall not include works that remain separable - from, or merely link (or bind by name) to the interfaces of, the Work and - Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including the original - version of the Work and any modifications or additions to that Work or - Derivative Works thereof, that is intentionally submitted to Licensor for - inclusion in the Work by the copyright owner or by an individual or Legal - Entity authorized to submit on behalf of the copyright owner. For the - purposes of this definition, "submitted" means any form of electronic, - verbal, or written communication sent to the Licensor or its - representatives, including but not limited to communication on electronic - mailing lists, source code control systems, and issue tracking systems - that are managed by, or on behalf of, the Licensor for the purpose of - discussing and improving the Work, but excluding communication that is - conspicuously marked or otherwise designated in writing by the copyright - owner as "Not a Contribution." 
- - "Contributor" shall mean Licensor and any individual or Legal Entity on - behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of this - License, each Contributor hereby grants to You a perpetual, worldwide, - non-exclusive, no-charge, royalty-free, irrevocable copyright license to - reproduce, prepare Derivative Works of, publicly display, publicly perform, - sublicense, and distribute the Work and such Derivative Works in Source or - Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of this - License, each Contributor hereby grants to You a perpetual, worldwide, - non-exclusive, no-charge, royalty-free, irrevocable (except as stated in - this section) patent license to make, have made, use, offer to sell, sell, - import, and otherwise transfer the Work, where such license applies only to - those patent claims licensable by such Contributor that are necessarily - infringed by their Contribution(s) alone or by combination of their - Contribution(s) with the Work to which such Contribution(s) was submitted. - If You institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work or a - Contribution incorporated within the Work constitutes direct or contributory - patent infringement, then any patent licenses granted to You under this - License for that Work shall terminate as of the date such litigation is - filed. - - 4. Redistribution. You may reproduce and distribute copies of the Work or - Derivative Works thereof in any medium, with or without modifications, and - in Source or Object form, provided that You meet the following conditions: - - (a) You must give any other recipients of the Work or Derivative Works a - copy of this License; and - - (b) You must cause any modified files to carry prominent notices stating - that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works that You - distribute, all copyright, patent, trademark, and attribution notices - from the Source form of the Work, excluding those notices that do not - pertain to any part of the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must include - a readable copy of the attribution notices contained within such NOTICE - file, excluding those notices that do not pertain to any part of the - Derivative Works, in at least one of the following places: within a - NOTICE text file distributed as part of the Derivative Works; within the - Source form or documentation, if provided along with the Derivative - Works; or, within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents of the - NOTICE file are for informational purposes only and do not modify the - License. You may add Your own attribution notices within Derivative Works - that You distribute, alongside or as an addendum to the NOTICE text from - the Work, provided that such additional attribution notices cannot be - construed as modifying the License. 
- - You may add Your own copyright statement to Your modifications and may - provide additional or different license terms and conditions for use, - reproduction, or distribution of Your modifications, or for any such - Derivative Works as a whole, provided Your use, reproduction, and - distribution of the Work otherwise complies with the conditions stated in - this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, any - Contribution intentionally submitted for inclusion in the Work by You to the - Licensor shall be under the terms and conditions of this License, without - any additional terms or conditions. Notwithstanding the above, nothing - herein shall supersede or modify the terms of any separate license agreement - you may have executed with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, except - as required for reasonable and customary use in describing the origin of the - Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in - writing, Licensor provides the Work (and each Contributor provides its - Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - KIND, either express or implied, including, without limitation, any - warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or - FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining - the appropriateness of using or redistributing the Work and assume any risks - associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, whether - in tort (including negligence), contract, or otherwise, unless required by - applicable law (such as deliberate and grossly negligent acts) or agreed to - in writing, shall any Contributor be liable to You for damages, including - any direct, indirect, special, incidental, or consequential damages of any - character arising as a result of this License or out of the use or inability - to use the Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all other - commercial damages or losses), even if such Contributor has been advised of - the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing the Work - or Derivative Works thereof, You may choose to offer, and charge a fee for, - acceptance of support, warranty, indemnity, or other liability obligations - and/or rights consistent with this License. However, in accepting such - obligations, You may act only on Your own behalf and on Your sole - responsibility, not on behalf of any other Contributor, and only if You - agree to indemnify, defend, and hold each Contributor harmless for any - liability incurred by, or claims asserted against, such Contributor by - reason of your accepting any such warranty or additional liability. END OF - TERMS AND CONDITIONS - -APPENDIX: How to apply the Apache License to your work. - -To apply the Apache License to your work, attach the following boilerplate -notice, with the fields enclosed by brackets "[]" replaced with your own -identifying information. (Don't include the brackets!) The text should be -enclosed in the appropriate comment syntax for the file format. 
We also -recommend that a file or class name and description of purpose be included on -the same "printed page" as the copyright notice for easier identification -within third-party archives. - -Copyright [yyyy] [name of copyright owner] - -Licensed under the Apache License, Version 2.0 (the "License"); - -you may not use this file except in compliance with the License. - -You may obtain a copy of the License at - -http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software - -distributed under the License is distributed on an "AS IS" BASIS, - -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - -See the License for the specific language governing permissions and - -limitations under the License. - -* For AWS SDK for JavaScript see also this required NOTICE: - Copyright 2012-2018 Amazon.com, Inc. or its affiliates. All Rights - Reserved. - ------- - -** GitHub Actions Toolkit; version 1.2.0 -- https://github.com/actions/toolkit -Copyright 2019 GitHub - + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +@smithy/util-stream +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +@smithy/util-uri-escape +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +@smithy/util-utf8 +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +@vercel/ncc +MIT +Copyright 2018 ZEIT, Inc. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +agent-base +MIT + +debug +MIT +(The MIT License) + +Copyright (c) 2014-2017 TJ Holowaychuk +Copyright (c) 2018-2021 Josh Junon + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software +and associated documentation files (the 'Software'), to deal in the Software without restriction, +including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial +portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT +LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + + +fast-xml-parser +MIT +MIT License + +Copyright (c) 2017 Amit Kumar Gupta + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
+ + +has-flag +MIT +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + +https-proxy-agent +MIT + +ms +MIT +The MIT License (MIT) + +Copyright (c) 2016 Zeit, Inc. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + + +strnum +MIT MIT License -Copyright (c) +Copyright (c) 2021 Natural Intelligence -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
@@ -229,3 +11401,69 @@ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + +supports-color +MIT +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + +tslib +0BSD +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. + +tunnel +MIT +The MIT License (MIT) + +Copyright (c) 2012 Koichi Kobayashi + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
+ + +uuid +MIT +The MIT License (MIT) + +Copyright (c) 2010-2020 Robert Kieffer and other contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/action.yml b/action.yml index 7b1fc8147..f4cff8099 100644 --- a/action.yml +++ b/action.yml @@ -1,76 +1,81 @@ -name: 'Configure AWS Credentials For GitHub Actions' -description: 'Configure AWS credential and region environment variables for use with the AWS CLI and AWS SDKs' + +name: '"Configure AWS Credentials" Action for GitHub Actions' +description: Configures AWS credentials for use in subsequent steps in a GitHub Action workflow +runs: + using: node16 + main: dist/index.js + post: dist/cleanup/index.js branding: - icon: 'cloud' - color: 'orange' + color: orange + icon: cloud inputs: - audience: - default: 'sts.amazonaws.com' - description: 'The audience to use for the OIDC provider' + aws-region: + description: AWS Region, e.g. us-east-2 + required: true + role-to-assume: + description: The Amazon Resource Name (ARN) of the role to assume. Use the provided credentials to assume an IAM role and configure the Actions environment with the assumed role credentials rather than with the provided credentials. required: false aws-access-key-id: - description: >- - AWS Access Key ID. This input is required if running in the GitHub hosted environment. - It is optional if running in a self-hosted environment that already has AWS credentials, - for example on an EC2 instance. + description: AWS Access Key ID. Provide this key if you want to assume a role using access keys rather than a web identity token. required: false aws-secret-access-key: - description: >- - AWS Secret Access Key. This input is required if running in the GitHub hosted environment. - It is optional if running in a self-hosted environment that already has AWS credentials, - for example on an EC2 instance. + description: AWS Secret Access Key. Required if aws-access-key-id is provided. required: false aws-session-token: - description: 'AWS Session Token' + description: AWS Session Token. required: false - aws-region: - description: 'AWS Region, e.g. us-east-2' - required: true - mask-aws-account-id: - description: >- - Whether to set the AWS account ID for these credentials as a secret value, - so that it is masked in logs. Valid values are 'true' and 'false'. - Defaults to true + web-identity-token-file: + description: Use the web identity token file from the provided file system path in order to assume an IAM role using a web identity, e.g. from within an Amazon EKS worker node. 
required: false - role-to-assume: - description: >- - Use the provided credentials to assume an IAM role and configure the Actions - environment with the assumed role credentials rather than with the provided - credentials + role-chaining: + description: Use existing credentials from the environment to assume a new role, rather than providing credentials as input. required: false - web-identity-token-file: - description: >- - Use the web identity token file from the provided file system path in order to - assume an IAM role using a web identity. E.g., from within an Amazon EKS worker node + audience: + description: The audience to use for the OIDC provider required: false - role-duration-seconds: - description: "Role duration in seconds (default: 6 hours, 1 hour for OIDC/specified aws-session-token)" + default: sts.amazonaws.com + http-proxy: + description: Proxy to use for the AWS SDK agent required: false - role-session-name: - description: 'Role session name (default: GitHubActions)' + mask-aws-account-id: + description: Whether to mask the AWS account ID for these credentials as a secret value. By default the account ID will not be masked required: false - role-external-id: - description: 'The external ID of the role to assume' + role-duration-seconds: + description: Role duration in seconds. Default is one hour. required: false - role-skip-session-tagging: - description: 'Skip session tagging during role assumption' + role-external-id: + description: The external ID of the role to assume. required: false - http-proxy: - description: 'Proxy to use for the AWS SDK agent' + role-session-name: + description: "Role session name (default: GitHubActions)" required: false - role-chaining: - description: 'Use existing credentials from the environment to assume a new role' + role-skip-session-tagging: + description: Skip session tagging during role assumption required: false inline-session-policy: - description: 'Inline session policy' + description: Define an inline session policy to use when assuming a role required: false managed-session-policies: - description: 'List of managed session policies' + description: Define a list of managed session policies to use when assuming a role + required: false + output-credentials: + description: Whether to set credentials as step output + required: false + unset-current-credentials: + description: Whether to unset the existing credentials in your runner. May be useful if you run this action multiple times in the same job + required: false + disable-retry: + description: Whether to disable the retry and backoff mechanism when the assume role call fails. By default the retry mechanism is enabled + required: false + retry-max-attempts: + description: The maximum number of attempts it will attempt to retry the assume role call. 
By default it will retry 12 times required: false outputs: aws-account-id: - description: 'The AWS account ID for the provided credentials' -runs: - using: 'node16' - main: 'dist/index.js' - post: 'dist/cleanup/index.js' + description: The AWS account ID for the provided credentials + aws-access-key-id: + description: The AWS access key ID for the provided credentials + aws-secret-access-key: + description: The AWS secret access key for the provided credentials + aws-session-token: + description: The AWS session token for the provided credentials diff --git a/cleanup.test.js b/cleanup.test.js deleted file mode 100644 index a84eaaf5d..000000000 --- a/cleanup.test.js +++ /dev/null @@ -1,51 +0,0 @@ -const core = require('@actions/core'); -const cleanup = require('./cleanup.js'); - -jest.mock('@actions/core'); - -const FAKE_ACCESS_KEY_ID = 'MY-AWS-ACCESS-KEY-ID'; -const FAKE_SECRET_ACCESS_KEY = 'MY-AWS-SECRET-ACCESS-KEY'; -const FAKE_SESSION_TOKEN = 'MY-AWS-SESSION-TOKEN'; -const FAKE_REGION = 'fake-region-1'; -const ACTION_ENVIRONMENT_VARIABLES = { - AWS_ACCESS_KEY_ID: FAKE_ACCESS_KEY_ID, - AWS_SECRET_ACCESS_KEY: FAKE_SECRET_ACCESS_KEY, - AWS_SESSION_TOKEN: FAKE_SESSION_TOKEN, - AWS_DEFAULT_REGION: FAKE_REGION, - AWS_REGION: FAKE_REGION, -}; - -describe('Configure AWS Credentials', () => { - const OLD_ENV = process.env; - - beforeEach(() => { - jest.resetModules(); - process.env = {...OLD_ENV, ...ACTION_ENVIRONMENT_VARIABLES}; - }); - - afterEach(() => { - process.env = OLD_ENV; - }); - - test('replaces AWS credential and region env vars with empty strings', async () => { - await cleanup(); - expect(core.setFailed).toHaveBeenCalledTimes(0); - expect(core.exportVariable).toHaveBeenCalledTimes(5); - expect(core.exportVariable).toHaveBeenCalledWith('AWS_ACCESS_KEY_ID', ''); - expect(core.exportVariable).toHaveBeenCalledWith('AWS_SECRET_ACCESS_KEY', ''); - expect(core.exportVariable).toHaveBeenCalledWith('AWS_SESSION_TOKEN', ''); - expect(core.exportVariable).toHaveBeenCalledWith('AWS_DEFAULT_REGION', ''); - expect(core.exportVariable).toHaveBeenCalledWith('AWS_REGION', ''); - }); - - test('error is caught and fails the action', async () => { - core.exportVariable.mockReset(); - core.exportVariable.mockImplementation(() => { - throw new Error(); - }); - - await cleanup(); - - expect(core.setFailed).toBeCalled(); - }); -}); diff --git a/dist/cleanup/index.js b/dist/cleanup/index.js index 9a8b47f97..1b0dd75c2 100644 --- a/dist/cleanup/index.js +++ b/dist/cleanup/index.js @@ -1,50 +1,7 @@ /******/ (() => { // webpackBootstrap /******/ var __webpack_modules__ = ({ -/***/ 722: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const core = __nccwpck_require__(186); - -/** - * When the GitHub Actions job is done, clean up any environment variables that - * may have been set by the configure-aws-credentials steps in the job. - * - * Environment variables are not intended to be shared across different jobs in - * the same GitHub Actions workflow: GitHub Actions documentation states that - * each job runs in a fresh instance. However, doing our own cleanup will - * give us additional assurance that these environment variables are not shared - * with any other jobs. - */ - -async function cleanup() { - try { - // The GitHub Actions toolkit does not have an option to completely unset - // environment variables, so we overwrite the current value with an empty - // string. 
The AWS CLI and AWS SDKs will behave correctly: they treat an - // empty string value as if the environment variable does not exist. - core.exportVariable('AWS_ACCESS_KEY_ID', ''); - core.exportVariable('AWS_SECRET_ACCESS_KEY', ''); - core.exportVariable('AWS_SESSION_TOKEN', ''); - core.exportVariable('AWS_DEFAULT_REGION', ''); - core.exportVariable('AWS_REGION', ''); - } - catch (error) { - core.setFailed(error.message); - } -} - -module.exports = cleanup; - -/* istanbul ignore next */ -if (require.main === require.cache[eval('__filename')]) { - cleanup(); -} - - -/***/ }), - -/***/ 351: +/***/ 7351: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; @@ -70,8 +27,8 @@ var __importStar = (this && this.__importStar) || function (mod) { }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.issue = exports.issueCommand = void 0; -const os = __importStar(__nccwpck_require__(37)); -const utils_1 = __nccwpck_require__(278); +const os = __importStar(__nccwpck_require__(2037)); +const utils_1 = __nccwpck_require__(5278); /** * Commands * @@ -143,7 +100,7 @@ function escapeProperty(s) { /***/ }), -/***/ 186: +/***/ 2186: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; @@ -178,12 +135,12 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0; -const command_1 = __nccwpck_require__(351); +const command_1 = __nccwpck_require__(7351); const file_command_1 = __nccwpck_require__(717); -const utils_1 = __nccwpck_require__(278); -const os = __importStar(__nccwpck_require__(37)); -const path = __importStar(__nccwpck_require__(17)); -const oidc_utils_1 = __nccwpck_require__(41); +const utils_1 = __nccwpck_require__(5278); +const os = __importStar(__nccwpck_require__(2037)); +const path = __importStar(__nccwpck_require__(1017)); +const oidc_utils_1 = __nccwpck_require__(8041); /** * The code to exit an action */ @@ -468,17 +425,17 @@ exports.getIDToken = getIDToken; /** * Summary exports */ -var summary_1 = __nccwpck_require__(327); +var summary_1 = __nccwpck_require__(1327); Object.defineProperty(exports, "summary", ({ enumerable: true, get: function () { return summary_1.summary; } })); /** * @deprecated use core.summary */ -var summary_2 = __nccwpck_require__(327); +var summary_2 = __nccwpck_require__(1327); Object.defineProperty(exports, "markdownSummary", ({ enumerable: true, get: function () { return summary_2.markdownSummary; } })); /** * Path exports */ -var path_utils_1 = __nccwpck_require__(981); +var path_utils_1 = __nccwpck_require__(2981); Object.defineProperty(exports, "toPosixPath", ({ enumerable: true, get: function () { return path_utils_1.toPosixPath; } })); Object.defineProperty(exports, "toWin32Path", ({ enumerable: true, get: function () { return path_utils_1.toWin32Path; } })); Object.defineProperty(exports, "toPlatformPath", ({ enumerable: true, get: function () { return path_utils_1.toPlatformPath; } })); @@ -515,10 +472,10 @@ Object.defineProperty(exports, 
"__esModule", ({ value: true })); exports.prepareKeyValueMessage = exports.issueFileCommand = void 0; // We use any as a valid input type /* eslint-disable @typescript-eslint/no-explicit-any */ -const fs = __importStar(__nccwpck_require__(147)); -const os = __importStar(__nccwpck_require__(37)); -const uuid_1 = __nccwpck_require__(840); -const utils_1 = __nccwpck_require__(278); +const fs = __importStar(__nccwpck_require__(7147)); +const os = __importStar(__nccwpck_require__(2037)); +const uuid_1 = __nccwpck_require__(5840); +const utils_1 = __nccwpck_require__(5278); function issueFileCommand(command, message) { const filePath = process.env[`GITHUB_${command}`]; if (!filePath) { @@ -551,7 +508,7 @@ exports.prepareKeyValueMessage = prepareKeyValueMessage; /***/ }), -/***/ 41: +/***/ 8041: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; @@ -567,9 +524,9 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.OidcClient = void 0; -const http_client_1 = __nccwpck_require__(255); -const auth_1 = __nccwpck_require__(526); -const core_1 = __nccwpck_require__(186); +const http_client_1 = __nccwpck_require__(6255); +const auth_1 = __nccwpck_require__(5526); +const core_1 = __nccwpck_require__(2186); class OidcClient { static createHttpClient(allowRetry = true, maxRetry = 10) { const requestOptions = { @@ -635,7 +592,7 @@ exports.OidcClient = OidcClient; /***/ }), -/***/ 981: +/***/ 2981: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; @@ -661,7 +618,7 @@ var __importStar = (this && this.__importStar) || function (mod) { }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = void 0; -const path = __importStar(__nccwpck_require__(17)); +const path = __importStar(__nccwpck_require__(1017)); /** * toPosixPath converts the given path to the posix form. On Windows, \\ will be * replaced with /. 
@@ -700,7 +657,7 @@ exports.toPlatformPath = toPlatformPath; /***/ }), -/***/ 327: +/***/ 1327: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; @@ -716,8 +673,8 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0; -const os_1 = __nccwpck_require__(37); -const fs_1 = __nccwpck_require__(147); +const os_1 = __nccwpck_require__(2037); +const fs_1 = __nccwpck_require__(7147); const { access, appendFile, writeFile } = fs_1.promises; exports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY'; exports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary'; @@ -990,7 +947,7 @@ exports.summary = _summary; /***/ }), -/***/ 278: +/***/ 5278: /***/ ((__unused_webpack_module, exports) => { "use strict"; @@ -1037,7 +994,7 @@ exports.toCommandProperties = toCommandProperties; /***/ }), -/***/ 526: +/***/ 5526: /***/ (function(__unused_webpack_module, exports) { "use strict"; @@ -1125,7 +1082,7 @@ exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHand /***/ }), -/***/ 255: +/***/ 6255: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; @@ -1161,10 +1118,10 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0; -const http = __importStar(__nccwpck_require__(685)); -const https = __importStar(__nccwpck_require__(687)); -const pm = __importStar(__nccwpck_require__(835)); -const tunnel = __importStar(__nccwpck_require__(294)); +const http = __importStar(__nccwpck_require__(3685)); +const https = __importStar(__nccwpck_require__(5687)); +const pm = __importStar(__nccwpck_require__(9835)); +const tunnel = __importStar(__nccwpck_require__(4294)); var HttpCodes; (function (HttpCodes) { HttpCodes[HttpCodes["OK"] = 200] = "OK"; @@ -1254,6 +1211,19 @@ class HttpClientResponse { })); }); } + readBodyBuffer() { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { + const chunks = []; + this.message.on('data', (chunk) => { + chunks.push(chunk); + }); + this.message.on('end', () => { + resolve(Buffer.concat(chunks)); + }); + })); + }); + } } exports.HttpClientResponse = HttpClientResponse; function isHttps(requestUrl) { @@ -1737,7 +1707,7 @@ const lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCa /***/ }), -/***/ 835: +/***/ 9835: /***/ ((__unused_webpack_module, exports) => { "use strict"; @@ -1758,7 +1728,13 @@ function getProxyUrl(reqUrl) { } })(); if (proxyVar) { - return new URL(proxyVar); + try { + return new URL(proxyVar); + } + catch (_a) { + if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://')) + return new URL(`http://${proxyVar}`); + } } else { return undefined; @@ -1820,191 +1796,18026 @@ function isLoopbackAddress(host) { /***/ }), -/***/ 294: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 2374: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -module.exports = __nccwpck_require__(219); +"use 
strict"; +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AwsCrc32 = void 0; +var tslib_1 = __nccwpck_require__(5066); +var util_1 = __nccwpck_require__(1236); +var index_1 = __nccwpck_require__(7327); +var AwsCrc32 = /** @class */ (function () { + function AwsCrc32() { + this.crc32 = new index_1.Crc32(); + } + AwsCrc32.prototype.update = function (toHash) { + if ((0, util_1.isEmptyData)(toHash)) + return; + this.crc32.update((0, util_1.convertToBuffer)(toHash)); + }; + AwsCrc32.prototype.digest = function () { + return tslib_1.__awaiter(this, void 0, void 0, function () { + return tslib_1.__generator(this, function (_a) { + return [2 /*return*/, (0, util_1.numToUint8)(this.crc32.digest())]; + }); + }); + }; + AwsCrc32.prototype.reset = function () { + this.crc32 = new index_1.Crc32(); + }; + return AwsCrc32; +}()); +exports.AwsCrc32 = AwsCrc32; +//# sourceMappingURL=aws_crc32.js.map /***/ }), -/***/ 219: +/***/ 7327: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AwsCrc32 = exports.Crc32 = exports.crc32 = void 0; +var tslib_1 = __nccwpck_require__(5066); +var util_1 = __nccwpck_require__(1236); +function crc32(data) { + return new Crc32().update(data).digest(); +} +exports.crc32 = crc32; +var Crc32 = /** @class */ (function () { + function Crc32() { + this.checksum = 0xffffffff; + } + Crc32.prototype.update = function (data) { + var e_1, _a; + try { + for (var data_1 = tslib_1.__values(data), data_1_1 = data_1.next(); !data_1_1.done; data_1_1 = data_1.next()) { + var byte = data_1_1.value; + this.checksum = + (this.checksum >>> 8) ^ lookupTable[(this.checksum ^ byte) & 0xff]; + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (data_1_1 && !data_1_1.done && (_a = data_1.return)) _a.call(data_1); + } + finally { if (e_1) throw e_1.error; } + } + return this; + }; + Crc32.prototype.digest = function () { + return (this.checksum ^ 0xffffffff) >>> 0; + }; + return Crc32; +}()); +exports.Crc32 = Crc32; +// prettier-ignore +var a_lookUpTable = [ + 0x00000000, 0x77073096, 0xEE0E612C, 0x990951BA, + 0x076DC419, 0x706AF48F, 0xE963A535, 0x9E6495A3, + 0x0EDB8832, 0x79DCB8A4, 0xE0D5E91E, 0x97D2D988, + 0x09B64C2B, 0x7EB17CBD, 0xE7B82D07, 0x90BF1D91, + 0x1DB71064, 0x6AB020F2, 0xF3B97148, 0x84BE41DE, + 0x1ADAD47D, 0x6DDDE4EB, 0xF4D4B551, 0x83D385C7, + 0x136C9856, 0x646BA8C0, 0xFD62F97A, 0x8A65C9EC, + 0x14015C4F, 0x63066CD9, 0xFA0F3D63, 0x8D080DF5, + 0x3B6E20C8, 0x4C69105E, 0xD56041E4, 0xA2677172, + 0x3C03E4D1, 0x4B04D447, 0xD20D85FD, 0xA50AB56B, + 0x35B5A8FA, 0x42B2986C, 0xDBBBC9D6, 0xACBCF940, + 0x32D86CE3, 0x45DF5C75, 0xDCD60DCF, 0xABD13D59, + 0x26D930AC, 0x51DE003A, 0xC8D75180, 0xBFD06116, + 0x21B4F4B5, 0x56B3C423, 0xCFBA9599, 0xB8BDA50F, + 0x2802B89E, 0x5F058808, 0xC60CD9B2, 0xB10BE924, + 0x2F6F7C87, 0x58684C11, 0xC1611DAB, 0xB6662D3D, + 0x76DC4190, 0x01DB7106, 0x98D220BC, 0xEFD5102A, + 0x71B18589, 0x06B6B51F, 0x9FBFE4A5, 0xE8B8D433, + 0x7807C9A2, 0x0F00F934, 0x9609A88E, 0xE10E9818, + 0x7F6A0DBB, 0x086D3D2D, 0x91646C97, 0xE6635C01, + 0x6B6B51F4, 0x1C6C6162, 0x856530D8, 0xF262004E, + 0x6C0695ED, 0x1B01A57B, 0x8208F4C1, 0xF50FC457, + 0x65B0D9C6, 0x12B7E950, 0x8BBEB8EA, 0xFCB9887C, + 0x62DD1DDF, 0x15DA2D49, 0x8CD37CF3, 0xFBD44C65, + 0x4DB26158, 0x3AB551CE, 0xA3BC0074, 0xD4BB30E2, + 0x4ADFA541, 0x3DD895D7, 0xA4D1C46D, 0xD3D6F4FB, + 
0x4369E96A, 0x346ED9FC, 0xAD678846, 0xDA60B8D0, + 0x44042D73, 0x33031DE5, 0xAA0A4C5F, 0xDD0D7CC9, + 0x5005713C, 0x270241AA, 0xBE0B1010, 0xC90C2086, + 0x5768B525, 0x206F85B3, 0xB966D409, 0xCE61E49F, + 0x5EDEF90E, 0x29D9C998, 0xB0D09822, 0xC7D7A8B4, + 0x59B33D17, 0x2EB40D81, 0xB7BD5C3B, 0xC0BA6CAD, + 0xEDB88320, 0x9ABFB3B6, 0x03B6E20C, 0x74B1D29A, + 0xEAD54739, 0x9DD277AF, 0x04DB2615, 0x73DC1683, + 0xE3630B12, 0x94643B84, 0x0D6D6A3E, 0x7A6A5AA8, + 0xE40ECF0B, 0x9309FF9D, 0x0A00AE27, 0x7D079EB1, + 0xF00F9344, 0x8708A3D2, 0x1E01F268, 0x6906C2FE, + 0xF762575D, 0x806567CB, 0x196C3671, 0x6E6B06E7, + 0xFED41B76, 0x89D32BE0, 0x10DA7A5A, 0x67DD4ACC, + 0xF9B9DF6F, 0x8EBEEFF9, 0x17B7BE43, 0x60B08ED5, + 0xD6D6A3E8, 0xA1D1937E, 0x38D8C2C4, 0x4FDFF252, + 0xD1BB67F1, 0xA6BC5767, 0x3FB506DD, 0x48B2364B, + 0xD80D2BDA, 0xAF0A1B4C, 0x36034AF6, 0x41047A60, + 0xDF60EFC3, 0xA867DF55, 0x316E8EEF, 0x4669BE79, + 0xCB61B38C, 0xBC66831A, 0x256FD2A0, 0x5268E236, + 0xCC0C7795, 0xBB0B4703, 0x220216B9, 0x5505262F, + 0xC5BA3BBE, 0xB2BD0B28, 0x2BB45A92, 0x5CB36A04, + 0xC2D7FFA7, 0xB5D0CF31, 0x2CD99E8B, 0x5BDEAE1D, + 0x9B64C2B0, 0xEC63F226, 0x756AA39C, 0x026D930A, + 0x9C0906A9, 0xEB0E363F, 0x72076785, 0x05005713, + 0x95BF4A82, 0xE2B87A14, 0x7BB12BAE, 0x0CB61B38, + 0x92D28E9B, 0xE5D5BE0D, 0x7CDCEFB7, 0x0BDBDF21, + 0x86D3D2D4, 0xF1D4E242, 0x68DDB3F8, 0x1FDA836E, + 0x81BE16CD, 0xF6B9265B, 0x6FB077E1, 0x18B74777, + 0x88085AE6, 0xFF0F6A70, 0x66063BCA, 0x11010B5C, + 0x8F659EFF, 0xF862AE69, 0x616BFFD3, 0x166CCF45, + 0xA00AE278, 0xD70DD2EE, 0x4E048354, 0x3903B3C2, + 0xA7672661, 0xD06016F7, 0x4969474D, 0x3E6E77DB, + 0xAED16A4A, 0xD9D65ADC, 0x40DF0B66, 0x37D83BF0, + 0xA9BCAE53, 0xDEBB9EC5, 0x47B2CF7F, 0x30B5FFE9, + 0xBDBDF21C, 0xCABAC28A, 0x53B39330, 0x24B4A3A6, + 0xBAD03605, 0xCDD70693, 0x54DE5729, 0x23D967BF, + 0xB3667A2E, 0xC4614AB8, 0x5D681B02, 0x2A6F2B94, + 0xB40BBE37, 0xC30C8EA1, 0x5A05DF1B, 0x2D02EF8D, +]; +var lookupTable = (0, util_1.uint32ArrayFrom)(a_lookUpTable); +var aws_crc32_1 = __nccwpck_require__(2374); +Object.defineProperty(exports, "AwsCrc32", ({ enumerable: true, get: function () { return aws_crc32_1.AwsCrc32; } })); +//# sourceMappingURL=index.js.map -var net = __nccwpck_require__(808); -var tls = __nccwpck_require__(404); -var http = __nccwpck_require__(685); -var https = __nccwpck_require__(687); -var events = __nccwpck_require__(361); -var assert = __nccwpck_require__(491); -var util = __nccwpck_require__(837); +/***/ }), +/***/ 5066: +/***/ ((module) => { -exports.httpOverHttp = httpOverHttp; -exports.httpsOverHttp = httpsOverHttp; -exports.httpOverHttps = httpOverHttps; -exports.httpsOverHttps = httpsOverHttps; +/*! ***************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. 
+***************************************************************************** */ + +/* global global, define, System, Reflect, Promise */ +var __extends; +var __assign; +var __rest; +var __decorate; +var __param; +var __metadata; +var __awaiter; +var __generator; +var __exportStar; +var __values; +var __read; +var __spread; +var __spreadArrays; +var __await; +var __asyncGenerator; +var __asyncDelegator; +var __asyncValues; +var __makeTemplateObject; +var __importStar; +var __importDefault; +var __classPrivateFieldGet; +var __classPrivateFieldSet; +var __createBinding; +(function (factory) { + var root = typeof global === "object" ? global : typeof self === "object" ? self : typeof this === "object" ? this : {}; + if (typeof define === "function" && define.amd) { + define("tslib", ["exports"], function (exports) { factory(createExporter(root, createExporter(exports))); }); + } + else if ( true && typeof module.exports === "object") { + factory(createExporter(root, createExporter(module.exports))); + } + else { + factory(createExporter(root)); + } + function createExporter(exports, previous) { + if (exports !== root) { + if (typeof Object.create === "function") { + Object.defineProperty(exports, "__esModule", { value: true }); + } + else { + exports.__esModule = true; + } + } + return function (id, v) { return exports[id] = previous ? previous(id, v) : v; }; + } +}) +(function (exporter) { + var extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; + + __extends = function (d, b) { + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); + }; + __assign = Object.assign || function (t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + }; -function httpOverHttp(options) { - var agent = new TunnelingAgent(options); - agent.request = http.request; - return agent; -} + __rest = function (s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; + }; -function httpsOverHttp(options) { - var agent = new TunnelingAgent(options); - agent.request = http.request; - agent.createSocket = createSecureSocket; - agent.defaultPort = 443; - return agent; -} + __decorate = function (decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; + }; -function httpOverHttps(options) { - var agent = new TunnelingAgent(options); - agent.request = https.request; - return agent; -} + __param = function (paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } + }; -function httpsOverHttps(options) { - var agent = new TunnelingAgent(options); - agent.request = https.request; - agent.createSocket = createSecureSocket; - agent.defaultPort = 443; - return agent; -} + __metadata = function (metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); + }; + __awaiter = function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; -function TunnelingAgent(options) { - var self = this; - self.options = options || {}; - self.proxyOptions = self.options.proxy || {}; - self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets; - self.requests = []; - self.sockets = []; + __generator = function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (_) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } + }; - self.on('free', function onFree(socket, host, port, localAddress) { - var options = toOptions(host, port, localAddress); - for (var i = 0, len = self.requests.length; i < len; ++i) { - var pending = self.requests[i]; - if (pending.host === options.host && pending.port === options.port) { - // Detect the request to connect same origin server, - // reuse the connection. 
- self.requests.splice(i, 1); - pending.request.onSocket(socket); - return; - } - } - socket.destroy(); - self.removeSocket(socket); - }); -} -util.inherits(TunnelingAgent, events.EventEmitter); + __createBinding = function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; + }; -TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) { - var self = this; - var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress)); + __exportStar = function (m, exports) { + for (var p in m) if (p !== "default" && !exports.hasOwnProperty(p)) exports[p] = m[p]; + }; - if (self.sockets.length >= this.maxSockets) { - // We are over limit so we'll add it to the queue. - self.requests.push(options); - return; - } + __values = function (o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined."); + }; - // If we are under maxSockets create a new one. - self.createSocket(options, function(socket) { - socket.on('free', onFree); - socket.on('close', onCloseOrRemove); - socket.on('agentRemove', onCloseOrRemove); - req.onSocket(socket); + __read = function (o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; + }; - function onFree() { - self.emit('free', socket, options); - } + __spread = function () { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; + }; - function onCloseOrRemove(err) { - self.removeSocket(socket); - socket.removeListener('free', onFree); - socket.removeListener('close', onCloseOrRemove); - socket.removeListener('agentRemove', onCloseOrRemove); - } - }); -}; + __spreadArrays = function () { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; + }; -TunnelingAgent.prototype.createSocket = function createSocket(options, cb) { - var self = this; - var placeholder = {}; - self.sockets.push(placeholder); + __await = function (v) { + return this instanceof __await ? 
(this.v = v, this) : new __await(v); + }; - var connectOptions = mergeOptions({}, self.proxyOptions, { - method: 'CONNECT', - path: options.host + ':' + options.port, - agent: false, - headers: { - host: options.host + ':' + options.port - } - }); - if (options.localAddress) { - connectOptions.localAddress = options.localAddress; - } - if (connectOptions.proxyAuth) { - connectOptions.headers = connectOptions.headers || {}; - connectOptions.headers['Proxy-Authorization'] = 'Basic ' + - new Buffer(connectOptions.proxyAuth).toString('base64'); - } + __asyncGenerator = function (thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } + }; - debug('making CONNECT request'); - var connectReq = self.request(connectOptions); - connectReq.useChunkedEncodingByDefault = false; // for v0.6 - connectReq.once('response', onResponse); // for v0.6 - connectReq.once('upgrade', onUpgrade); // for v0.6 - connectReq.once('connect', onConnect); // for v0.7 or later - connectReq.once('error', onError); - connectReq.end(); + __asyncDelegator = function (o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; } + }; - function onResponse(res) { - // Very hacky. This is necessary to avoid http-parser leaks. - res.upgrade = true; - } + __asyncValues = function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } + }; - function onUpgrade(res, socket, head) { - // Hacky. 
- process.nextTick(function() { - onConnect(res, socket, head); - }); - } + __makeTemplateObject = function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; + }; - function onConnect(res, socket, head) { - connectReq.removeAllListeners(); - socket.removeAllListeners(); + __importStar = function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; + }; - if (res.statusCode !== 200) { - debug('tunneling socket could not be established, statusCode=%d', - res.statusCode); - socket.destroy(); - var error = new Error('tunneling socket could not be established, ' + - 'statusCode=' + res.statusCode); - error.code = 'ECONNRESET'; - options.request.emit('error', error); - self.removeSocket(placeholder); - return; - } - if (head.length > 0) { - debug('got illegal response body from proxy'); - socket.destroy(); - var error = new Error('got illegal response body from proxy'); - error.code = 'ECONNRESET'; - options.request.emit('error', error); - self.removeSocket(placeholder); - return; - } - debug('tunneling connection has established'); - self.sockets[self.sockets.indexOf(placeholder)] = socket; - return cb(socket); - } + __importDefault = function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; + }; + + __classPrivateFieldGet = function (receiver, privateMap) { + if (!privateMap.has(receiver)) { + throw new TypeError("attempted to get private field on non-instance"); + } + return privateMap.get(receiver); + }; + + __classPrivateFieldSet = function (receiver, privateMap, value) { + if (!privateMap.has(receiver)) { + throw new TypeError("attempted to set private field on non-instance"); + } + privateMap.set(receiver, value); + return value; + }; + + exporter("__extends", __extends); + exporter("__assign", __assign); + exporter("__rest", __rest); + exporter("__decorate", __decorate); + exporter("__param", __param); + exporter("__metadata", __metadata); + exporter("__awaiter", __awaiter); + exporter("__generator", __generator); + exporter("__exportStar", __exportStar); + exporter("__createBinding", __createBinding); + exporter("__values", __values); + exporter("__read", __read); + exporter("__spread", __spread); + exporter("__spreadArrays", __spreadArrays); + exporter("__await", __await); + exporter("__asyncGenerator", __asyncGenerator); + exporter("__asyncDelegator", __asyncDelegator); + exporter("__asyncValues", __asyncValues); + exporter("__makeTemplateObject", __makeTemplateObject); + exporter("__importStar", __importStar); + exporter("__importDefault", __importDefault); + exporter("__classPrivateFieldGet", __classPrivateFieldGet); + exporter("__classPrivateFieldSet", __classPrivateFieldSet); +}); + + +/***/ }), + +/***/ 3228: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.convertToBuffer = void 0; +var util_utf8_browser_1 = __nccwpck_require__(8172); +// Quick polyfill +var fromUtf8 = typeof Buffer !== "undefined" && Buffer.from + ? 
function (input) { return Buffer.from(input, "utf8"); } + : util_utf8_browser_1.fromUtf8; +function convertToBuffer(data) { + // Already a Uint8, do nothing + if (data instanceof Uint8Array) + return data; + if (typeof data === "string") { + return fromUtf8(data); + } + if (ArrayBuffer.isView(data)) { + return new Uint8Array(data.buffer, data.byteOffset, data.byteLength / Uint8Array.BYTES_PER_ELEMENT); + } + return new Uint8Array(data); +} +exports.convertToBuffer = convertToBuffer; +//# sourceMappingURL=convertToBuffer.js.map + +/***/ }), + +/***/ 1236: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.uint32ArrayFrom = exports.numToUint8 = exports.isEmptyData = exports.convertToBuffer = void 0; +var convertToBuffer_1 = __nccwpck_require__(3228); +Object.defineProperty(exports, "convertToBuffer", ({ enumerable: true, get: function () { return convertToBuffer_1.convertToBuffer; } })); +var isEmptyData_1 = __nccwpck_require__(8275); +Object.defineProperty(exports, "isEmptyData", ({ enumerable: true, get: function () { return isEmptyData_1.isEmptyData; } })); +var numToUint8_1 = __nccwpck_require__(3775); +Object.defineProperty(exports, "numToUint8", ({ enumerable: true, get: function () { return numToUint8_1.numToUint8; } })); +var uint32ArrayFrom_1 = __nccwpck_require__(9404); +Object.defineProperty(exports, "uint32ArrayFrom", ({ enumerable: true, get: function () { return uint32ArrayFrom_1.uint32ArrayFrom; } })); +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 8275: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isEmptyData = void 0; +function isEmptyData(data) { + if (typeof data === "string") { + return data.length === 0; + } + return data.byteLength === 0; +} +exports.isEmptyData = isEmptyData; +//# sourceMappingURL=isEmptyData.js.map + +/***/ }), + +/***/ 3775: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.numToUint8 = void 0; +function numToUint8(num) { + return new Uint8Array([ + (num & 0xff000000) >> 24, + (num & 0x00ff0000) >> 16, + (num & 0x0000ff00) >> 8, + num & 0x000000ff, + ]); +} +exports.numToUint8 = numToUint8; +//# sourceMappingURL=numToUint8.js.map + +/***/ }), + +/***/ 9404: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.uint32ArrayFrom = void 0; +// IE 11 does not support Array.from, so we do it manually +function uint32ArrayFrom(a_lookUpTable) { + if (!Uint32Array.from) { + var return_array = new Uint32Array(a_lookUpTable.length); + var a_index = 0; + while (a_index < a_lookUpTable.length) { + return_array[a_index] = a_lookUpTable[a_index]; + a_index += 1; + } + return return_array; + } + return Uint32Array.from(a_lookUpTable); +} +exports.uint32ArrayFrom = uint32ArrayFrom; +//# sourceMappingURL=uint32ArrayFrom.js.map + +/***/ }), + +/***/ 9838: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.SSO = void 0; +const smithy_client_1 = __nccwpck_require__(3570); +const GetRoleCredentialsCommand_1 = __nccwpck_require__(8972); +const ListAccountRolesCommand_1 = __nccwpck_require__(1513); +const ListAccountsCommand_1 = __nccwpck_require__(5009); +const LogoutCommand_1 = __nccwpck_require__(2586); +const SSOClient_1 = __nccwpck_require__(1057); +const commands = { + GetRoleCredentialsCommand: GetRoleCredentialsCommand_1.GetRoleCredentialsCommand, + ListAccountRolesCommand: ListAccountRolesCommand_1.ListAccountRolesCommand, + ListAccountsCommand: ListAccountsCommand_1.ListAccountsCommand, + LogoutCommand: LogoutCommand_1.LogoutCommand, +}; +class SSO extends SSOClient_1.SSOClient { +} +exports.SSO = SSO; +(0, smithy_client_1.createAggregatedClient)(commands, SSO); + + +/***/ }), + +/***/ 1057: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.SSOClient = exports.__Client = void 0; +const middleware_host_header_1 = __nccwpck_require__(2545); +const middleware_logger_1 = __nccwpck_require__(14); +const middleware_recursion_detection_1 = __nccwpck_require__(5525); +const middleware_user_agent_1 = __nccwpck_require__(4688); +const config_resolver_1 = __nccwpck_require__(3098); +const middleware_content_length_1 = __nccwpck_require__(2800); +const middleware_endpoint_1 = __nccwpck_require__(2918); +const middleware_retry_1 = __nccwpck_require__(6039); +const smithy_client_1 = __nccwpck_require__(3570); +Object.defineProperty(exports, "__Client", ({ enumerable: true, get: function () { return smithy_client_1.Client; } })); +const EndpointParameters_1 = __nccwpck_require__(4214); +const runtimeConfig_1 = __nccwpck_require__(9756); +class SSOClient extends smithy_client_1.Client { + constructor(...[configuration]) { + const _config_0 = (0, runtimeConfig_1.getRuntimeConfig)(configuration || {}); + const _config_1 = (0, EndpointParameters_1.resolveClientEndpointParameters)(_config_0); + const _config_2 = (0, config_resolver_1.resolveRegionConfig)(_config_1); + const _config_3 = (0, middleware_endpoint_1.resolveEndpointConfig)(_config_2); + const _config_4 = (0, middleware_retry_1.resolveRetryConfig)(_config_3); + const _config_5 = (0, middleware_host_header_1.resolveHostHeaderConfig)(_config_4); + const _config_6 = (0, middleware_user_agent_1.resolveUserAgentConfig)(_config_5); + super(_config_6); + this.config = _config_6; + this.middlewareStack.use((0, middleware_retry_1.getRetryPlugin)(this.config)); + this.middlewareStack.use((0, middleware_content_length_1.getContentLengthPlugin)(this.config)); + this.middlewareStack.use((0, 
middleware_host_header_1.getHostHeaderPlugin)(this.config)); + this.middlewareStack.use((0, middleware_logger_1.getLoggerPlugin)(this.config)); + this.middlewareStack.use((0, middleware_recursion_detection_1.getRecursionDetectionPlugin)(this.config)); + this.middlewareStack.use((0, middleware_user_agent_1.getUserAgentPlugin)(this.config)); + } + destroy() { + super.destroy(); + } +} +exports.SSOClient = SSOClient; + + +/***/ }), + +/***/ 8972: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GetRoleCredentialsCommand = exports.$Command = void 0; +const middleware_endpoint_1 = __nccwpck_require__(2918); +const middleware_serde_1 = __nccwpck_require__(1238); +const smithy_client_1 = __nccwpck_require__(3570); +Object.defineProperty(exports, "$Command", ({ enumerable: true, get: function () { return smithy_client_1.Command; } })); +const models_0_1 = __nccwpck_require__(6390); +const Aws_restJson1_1 = __nccwpck_require__(8507); +class GetRoleCredentialsCommand extends smithy_client_1.Command { + static getEndpointParameterInstructions() { + return { + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" }, + }; + } + constructor(input) { + super(); + this.input = input; + } + resolveMiddleware(clientStack, configuration, options) { + this.middlewareStack.use((0, middleware_serde_1.getSerdePlugin)(configuration, this.serialize, this.deserialize)); + this.middlewareStack.use((0, middleware_endpoint_1.getEndpointPlugin)(configuration, GetRoleCredentialsCommand.getEndpointParameterInstructions())); + const stack = clientStack.concat(this.middlewareStack); + const { logger } = configuration; + const clientName = "SSOClient"; + const commandName = "GetRoleCredentialsCommand"; + const handlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: models_0_1.GetRoleCredentialsRequestFilterSensitiveLog, + outputFilterSensitiveLog: models_0_1.GetRoleCredentialsResponseFilterSensitiveLog, + }; + const { requestHandler } = configuration; + return stack.resolve((request) => requestHandler.handle(request.request, options || {}), handlerExecutionContext); + } + serialize(input, context) { + return (0, Aws_restJson1_1.se_GetRoleCredentialsCommand)(input, context); + } + deserialize(output, context) { + return (0, Aws_restJson1_1.de_GetRoleCredentialsCommand)(output, context); + } +} +exports.GetRoleCredentialsCommand = GetRoleCredentialsCommand; + + +/***/ }), + +/***/ 1513: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ListAccountRolesCommand = exports.$Command = void 0; +const middleware_endpoint_1 = __nccwpck_require__(2918); +const middleware_serde_1 = __nccwpck_require__(1238); +const smithy_client_1 = __nccwpck_require__(3570); +Object.defineProperty(exports, "$Command", ({ enumerable: true, get: function () { return smithy_client_1.Command; } })); +const models_0_1 = __nccwpck_require__(6390); +const Aws_restJson1_1 = __nccwpck_require__(8507); +class ListAccountRolesCommand extends smithy_client_1.Command { + static getEndpointParameterInstructions() { + return { + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + Endpoint: { type: 
"builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" }, + }; + } + constructor(input) { + super(); + this.input = input; + } + resolveMiddleware(clientStack, configuration, options) { + this.middlewareStack.use((0, middleware_serde_1.getSerdePlugin)(configuration, this.serialize, this.deserialize)); + this.middlewareStack.use((0, middleware_endpoint_1.getEndpointPlugin)(configuration, ListAccountRolesCommand.getEndpointParameterInstructions())); + const stack = clientStack.concat(this.middlewareStack); + const { logger } = configuration; + const clientName = "SSOClient"; + const commandName = "ListAccountRolesCommand"; + const handlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: models_0_1.ListAccountRolesRequestFilterSensitiveLog, + outputFilterSensitiveLog: (_) => _, + }; + const { requestHandler } = configuration; + return stack.resolve((request) => requestHandler.handle(request.request, options || {}), handlerExecutionContext); + } + serialize(input, context) { + return (0, Aws_restJson1_1.se_ListAccountRolesCommand)(input, context); + } + deserialize(output, context) { + return (0, Aws_restJson1_1.de_ListAccountRolesCommand)(output, context); + } +} +exports.ListAccountRolesCommand = ListAccountRolesCommand; + + +/***/ }), + +/***/ 5009: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ListAccountsCommand = exports.$Command = void 0; +const middleware_endpoint_1 = __nccwpck_require__(2918); +const middleware_serde_1 = __nccwpck_require__(1238); +const smithy_client_1 = __nccwpck_require__(3570); +Object.defineProperty(exports, "$Command", ({ enumerable: true, get: function () { return smithy_client_1.Command; } })); +const models_0_1 = __nccwpck_require__(6390); +const Aws_restJson1_1 = __nccwpck_require__(8507); +class ListAccountsCommand extends smithy_client_1.Command { + static getEndpointParameterInstructions() { + return { + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" }, + }; + } + constructor(input) { + super(); + this.input = input; + } + resolveMiddleware(clientStack, configuration, options) { + this.middlewareStack.use((0, middleware_serde_1.getSerdePlugin)(configuration, this.serialize, this.deserialize)); + this.middlewareStack.use((0, middleware_endpoint_1.getEndpointPlugin)(configuration, ListAccountsCommand.getEndpointParameterInstructions())); + const stack = clientStack.concat(this.middlewareStack); + const { logger } = configuration; + const clientName = "SSOClient"; + const commandName = "ListAccountsCommand"; + const handlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: models_0_1.ListAccountsRequestFilterSensitiveLog, + outputFilterSensitiveLog: (_) => _, + }; + const { requestHandler } = configuration; + return stack.resolve((request) => requestHandler.handle(request.request, options || {}), handlerExecutionContext); + } + serialize(input, context) { + return (0, Aws_restJson1_1.se_ListAccountsCommand)(input, context); + } + deserialize(output, context) { + return (0, Aws_restJson1_1.de_ListAccountsCommand)(output, context); + } +} +exports.ListAccountsCommand = 
ListAccountsCommand; + + +/***/ }), + +/***/ 2586: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.LogoutCommand = exports.$Command = void 0; +const middleware_endpoint_1 = __nccwpck_require__(2918); +const middleware_serde_1 = __nccwpck_require__(1238); +const smithy_client_1 = __nccwpck_require__(3570); +Object.defineProperty(exports, "$Command", ({ enumerable: true, get: function () { return smithy_client_1.Command; } })); +const models_0_1 = __nccwpck_require__(6390); +const Aws_restJson1_1 = __nccwpck_require__(8507); +class LogoutCommand extends smithy_client_1.Command { + static getEndpointParameterInstructions() { + return { + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" }, + }; + } + constructor(input) { + super(); + this.input = input; + } + resolveMiddleware(clientStack, configuration, options) { + this.middlewareStack.use((0, middleware_serde_1.getSerdePlugin)(configuration, this.serialize, this.deserialize)); + this.middlewareStack.use((0, middleware_endpoint_1.getEndpointPlugin)(configuration, LogoutCommand.getEndpointParameterInstructions())); + const stack = clientStack.concat(this.middlewareStack); + const { logger } = configuration; + const clientName = "SSOClient"; + const commandName = "LogoutCommand"; + const handlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: models_0_1.LogoutRequestFilterSensitiveLog, + outputFilterSensitiveLog: (_) => _, + }; + const { requestHandler } = configuration; + return stack.resolve((request) => requestHandler.handle(request.request, options || {}), handlerExecutionContext); + } + serialize(input, context) { + return (0, Aws_restJson1_1.se_LogoutCommand)(input, context); + } + deserialize(output, context) { + return (0, Aws_restJson1_1.de_LogoutCommand)(output, context); + } +} +exports.LogoutCommand = LogoutCommand; + + +/***/ }), + +/***/ 5706: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(8972), exports); +tslib_1.__exportStar(__nccwpck_require__(1513), exports); +tslib_1.__exportStar(__nccwpck_require__(5009), exports); +tslib_1.__exportStar(__nccwpck_require__(2586), exports); + + +/***/ }), + +/***/ 4214: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveClientEndpointParameters = void 0; +const resolveClientEndpointParameters = (options) => { + return { + ...options, + useDualstackEndpoint: options.useDualstackEndpoint ?? false, + useFipsEndpoint: options.useFipsEndpoint ?? 
false, + defaultSigningName: "awsssoportal", + }; +}; +exports.resolveClientEndpointParameters = resolveClientEndpointParameters; + + +/***/ }), + +/***/ 898: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.defaultEndpointResolver = void 0; +const util_endpoints_1 = __nccwpck_require__(3350); +const ruleset_1 = __nccwpck_require__(3341); +const defaultEndpointResolver = (endpointParams, context = {}) => { + return (0, util_endpoints_1.resolveEndpoint)(ruleset_1.ruleSet, { + endpointParams: endpointParams, + logger: context.logger, + }); +}; +exports.defaultEndpointResolver = defaultEndpointResolver; + + +/***/ }), + +/***/ 3341: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ruleSet = void 0; +const p = "required", q = "fn", r = "argv", s = "ref"; +const a = "PartitionResult", b = "tree", c = "error", d = "endpoint", e = { [p]: false, "type": "String" }, f = { [p]: true, "default": false, "type": "Boolean" }, g = { [s]: "Endpoint" }, h = { [q]: "booleanEquals", [r]: [{ [s]: "UseFIPS" }, true] }, i = { [q]: "booleanEquals", [r]: [{ [s]: "UseDualStack" }, true] }, j = {}, k = { [q]: "booleanEquals", [r]: [true, { [q]: "getAttr", [r]: [{ [s]: a }, "supportsFIPS"] }] }, l = { [q]: "booleanEquals", [r]: [true, { [q]: "getAttr", [r]: [{ [s]: a }, "supportsDualStack"] }] }, m = [g], n = [h], o = [i]; +const _data = { version: "1.0", parameters: { Region: e, UseDualStack: f, UseFIPS: f, Endpoint: e }, rules: [{ conditions: [{ [q]: "aws.partition", [r]: [{ [s]: "Region" }], assign: a }], type: b, rules: [{ conditions: [{ [q]: "isSet", [r]: m }, { [q]: "parseURL", [r]: m, assign: "url" }], type: b, rules: [{ conditions: n, error: "Invalid Configuration: FIPS and custom endpoint are not supported", type: c }, { type: b, rules: [{ conditions: o, error: "Invalid Configuration: Dualstack and custom endpoint are not supported", type: c }, { endpoint: { url: g, properties: j, headers: j }, type: d }] }] }, { conditions: [h, i], type: b, rules: [{ conditions: [k, l], type: b, rules: [{ endpoint: { url: "https://portal.sso-fips.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: j, headers: j }, type: d }] }, { error: "FIPS and DualStack are enabled, but this partition does not support one or both", type: c }] }, { conditions: n, type: b, rules: [{ conditions: [k], type: b, rules: [{ type: b, rules: [{ endpoint: { url: "https://portal.sso-fips.{Region}.{PartitionResult#dnsSuffix}", properties: j, headers: j }, type: d }] }] }, { error: "FIPS is enabled but this partition does not support FIPS", type: c }] }, { conditions: o, type: b, rules: [{ conditions: [l], type: b, rules: [{ endpoint: { url: "https://portal.sso.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: j, headers: j }, type: d }] }, { error: "DualStack is enabled but this partition does not support DualStack", type: c }] }, { endpoint: { url: "https://portal.sso.{Region}.{PartitionResult#dnsSuffix}", properties: j, headers: j }, type: d }] }] }; +exports.ruleSet = _data; + + +/***/ }), + +/***/ 8197: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.SSOServiceException = void 0; +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(1057), exports); 
+tslib_1.__exportStar(__nccwpck_require__(9838), exports); +tslib_1.__exportStar(__nccwpck_require__(5706), exports); +tslib_1.__exportStar(__nccwpck_require__(6773), exports); +tslib_1.__exportStar(__nccwpck_require__(4952), exports); +var SSOServiceException_1 = __nccwpck_require__(1517); +Object.defineProperty(exports, "SSOServiceException", ({ enumerable: true, get: function () { return SSOServiceException_1.SSOServiceException; } })); + + +/***/ }), + +/***/ 1517: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.SSOServiceException = exports.__ServiceException = void 0; +const smithy_client_1 = __nccwpck_require__(3570); +Object.defineProperty(exports, "__ServiceException", ({ enumerable: true, get: function () { return smithy_client_1.ServiceException; } })); +class SSOServiceException extends smithy_client_1.ServiceException { + constructor(options) { + super(options); + Object.setPrototypeOf(this, SSOServiceException.prototype); + } +} +exports.SSOServiceException = SSOServiceException; + + +/***/ }), + +/***/ 4952: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(6390), exports); + + +/***/ }), + +/***/ 6390: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.LogoutRequestFilterSensitiveLog = exports.ListAccountsRequestFilterSensitiveLog = exports.ListAccountRolesRequestFilterSensitiveLog = exports.GetRoleCredentialsResponseFilterSensitiveLog = exports.RoleCredentialsFilterSensitiveLog = exports.GetRoleCredentialsRequestFilterSensitiveLog = exports.UnauthorizedException = exports.TooManyRequestsException = exports.ResourceNotFoundException = exports.InvalidRequestException = void 0; +const smithy_client_1 = __nccwpck_require__(3570); +const SSOServiceException_1 = __nccwpck_require__(1517); +class InvalidRequestException extends SSOServiceException_1.SSOServiceException { + constructor(opts) { + super({ + name: "InvalidRequestException", + $fault: "client", + ...opts, + }); + this.name = "InvalidRequestException"; + this.$fault = "client"; + Object.setPrototypeOf(this, InvalidRequestException.prototype); + } +} +exports.InvalidRequestException = InvalidRequestException; +class ResourceNotFoundException extends SSOServiceException_1.SSOServiceException { + constructor(opts) { + super({ + name: "ResourceNotFoundException", + $fault: "client", + ...opts, + }); + this.name = "ResourceNotFoundException"; + this.$fault = "client"; + Object.setPrototypeOf(this, ResourceNotFoundException.prototype); + } +} +exports.ResourceNotFoundException = ResourceNotFoundException; +class TooManyRequestsException extends SSOServiceException_1.SSOServiceException { + constructor(opts) { + super({ + name: "TooManyRequestsException", + $fault: "client", + ...opts, + }); + this.name = "TooManyRequestsException"; + this.$fault = "client"; + Object.setPrototypeOf(this, TooManyRequestsException.prototype); + } +} +exports.TooManyRequestsException = TooManyRequestsException; +class UnauthorizedException extends SSOServiceException_1.SSOServiceException { + constructor(opts) { + super({ + name: "UnauthorizedException", + $fault: "client", + ...opts, + }); + this.name = "UnauthorizedException"; + 
this.$fault = "client"; + Object.setPrototypeOf(this, UnauthorizedException.prototype); + } +} +exports.UnauthorizedException = UnauthorizedException; +const GetRoleCredentialsRequestFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.accessToken && { accessToken: smithy_client_1.SENSITIVE_STRING }), +}); +exports.GetRoleCredentialsRequestFilterSensitiveLog = GetRoleCredentialsRequestFilterSensitiveLog; +const RoleCredentialsFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.secretAccessKey && { secretAccessKey: smithy_client_1.SENSITIVE_STRING }), + ...(obj.sessionToken && { sessionToken: smithy_client_1.SENSITIVE_STRING }), +}); +exports.RoleCredentialsFilterSensitiveLog = RoleCredentialsFilterSensitiveLog; +const GetRoleCredentialsResponseFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.roleCredentials && { roleCredentials: (0, exports.RoleCredentialsFilterSensitiveLog)(obj.roleCredentials) }), +}); +exports.GetRoleCredentialsResponseFilterSensitiveLog = GetRoleCredentialsResponseFilterSensitiveLog; +const ListAccountRolesRequestFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.accessToken && { accessToken: smithy_client_1.SENSITIVE_STRING }), +}); +exports.ListAccountRolesRequestFilterSensitiveLog = ListAccountRolesRequestFilterSensitiveLog; +const ListAccountsRequestFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.accessToken && { accessToken: smithy_client_1.SENSITIVE_STRING }), +}); +exports.ListAccountsRequestFilterSensitiveLog = ListAccountsRequestFilterSensitiveLog; +const LogoutRequestFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.accessToken && { accessToken: smithy_client_1.SENSITIVE_STRING }), +}); +exports.LogoutRequestFilterSensitiveLog = LogoutRequestFilterSensitiveLog; + + +/***/ }), + +/***/ 849: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 8460: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.paginateListAccountRoles = void 0; +const ListAccountRolesCommand_1 = __nccwpck_require__(1513); +const SSOClient_1 = __nccwpck_require__(1057); +const makePagedClientRequest = async (client, input, ...args) => { + return await client.send(new ListAccountRolesCommand_1.ListAccountRolesCommand(input), ...args); +}; +async function* paginateListAccountRoles(config, input, ...additionalArguments) { + let token = config.startingToken || undefined; + let hasNext = true; + let page; + while (hasNext) { + input.nextToken = token; + input["maxResults"] = config.pageSize; + if (config.client instanceof SSOClient_1.SSOClient) { + page = await makePagedClientRequest(config.client, input, ...additionalArguments); + } + else { + throw new Error("Invalid client, expected SSO | SSOClient"); + } + yield page; + const prevToken = token; + token = page.nextToken; + hasNext = !!(token && (!config.stopOnSameToken || token !== prevToken)); + } + return undefined; +} +exports.paginateListAccountRoles = paginateListAccountRoles; + + +/***/ }), + +/***/ 938: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.paginateListAccounts = void 0; +const ListAccountsCommand_1 = __nccwpck_require__(5009); +const SSOClient_1 = __nccwpck_require__(1057); +const makePagedClientRequest = async (client, input, ...args) => { + return await client.send(new 
ListAccountsCommand_1.ListAccountsCommand(input), ...args); +}; +async function* paginateListAccounts(config, input, ...additionalArguments) { + let token = config.startingToken || undefined; + let hasNext = true; + let page; + while (hasNext) { + input.nextToken = token; + input["maxResults"] = config.pageSize; + if (config.client instanceof SSOClient_1.SSOClient) { + page = await makePagedClientRequest(config.client, input, ...additionalArguments); + } + else { + throw new Error("Invalid client, expected SSO | SSOClient"); + } + yield page; + const prevToken = token; + token = page.nextToken; + hasNext = !!(token && (!config.stopOnSameToken || token !== prevToken)); + } + return undefined; +} +exports.paginateListAccounts = paginateListAccounts; + + +/***/ }), + +/***/ 6773: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(849), exports); +tslib_1.__exportStar(__nccwpck_require__(8460), exports); +tslib_1.__exportStar(__nccwpck_require__(938), exports); + + +/***/ }), + +/***/ 8507: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.de_LogoutCommand = exports.de_ListAccountsCommand = exports.de_ListAccountRolesCommand = exports.de_GetRoleCredentialsCommand = exports.se_LogoutCommand = exports.se_ListAccountsCommand = exports.se_ListAccountRolesCommand = exports.se_GetRoleCredentialsCommand = void 0; +const protocol_http_1 = __nccwpck_require__(4418); +const smithy_client_1 = __nccwpck_require__(3570); +const models_0_1 = __nccwpck_require__(6390); +const SSOServiceException_1 = __nccwpck_require__(1517); +const se_GetRoleCredentialsCommand = async (input, context) => { + const { hostname, protocol = "https", port, path: basePath } = await context.endpoint(); + const headers = (0, smithy_client_1.map)({}, isSerializableHeaderValue, { + "x-amz-sso_bearer_token": input.accessToken, + }); + const resolvedPath = `${basePath?.endsWith("/") ? basePath.slice(0, -1) : basePath || ""}` + "/federation/credentials"; + const query = (0, smithy_client_1.map)({ + role_name: [, (0, smithy_client_1.expectNonNull)(input.roleName, `roleName`)], + account_id: [, (0, smithy_client_1.expectNonNull)(input.accountId, `accountId`)], + }); + let body; + return new protocol_http_1.HttpRequest({ + protocol, + hostname, + port, + method: "GET", + headers, + path: resolvedPath, + query, + body, + }); +}; +exports.se_GetRoleCredentialsCommand = se_GetRoleCredentialsCommand; +const se_ListAccountRolesCommand = async (input, context) => { + const { hostname, protocol = "https", port, path: basePath } = await context.endpoint(); + const headers = (0, smithy_client_1.map)({}, isSerializableHeaderValue, { + "x-amz-sso_bearer_token": input.accessToken, + }); + const resolvedPath = `${basePath?.endsWith("/") ? 
basePath.slice(0, -1) : basePath || ""}` + "/assignment/roles"; + const query = (0, smithy_client_1.map)({ + next_token: [, input.nextToken], + max_result: [() => input.maxResults !== void 0, () => input.maxResults.toString()], + account_id: [, (0, smithy_client_1.expectNonNull)(input.accountId, `accountId`)], + }); + let body; + return new protocol_http_1.HttpRequest({ + protocol, + hostname, + port, + method: "GET", + headers, + path: resolvedPath, + query, + body, + }); +}; +exports.se_ListAccountRolesCommand = se_ListAccountRolesCommand; +const se_ListAccountsCommand = async (input, context) => { + const { hostname, protocol = "https", port, path: basePath } = await context.endpoint(); + const headers = (0, smithy_client_1.map)({}, isSerializableHeaderValue, { + "x-amz-sso_bearer_token": input.accessToken, + }); + const resolvedPath = `${basePath?.endsWith("/") ? basePath.slice(0, -1) : basePath || ""}` + "/assignment/accounts"; + const query = (0, smithy_client_1.map)({ + next_token: [, input.nextToken], + max_result: [() => input.maxResults !== void 0, () => input.maxResults.toString()], + }); + let body; + return new protocol_http_1.HttpRequest({ + protocol, + hostname, + port, + method: "GET", + headers, + path: resolvedPath, + query, + body, + }); +}; +exports.se_ListAccountsCommand = se_ListAccountsCommand; +const se_LogoutCommand = async (input, context) => { + const { hostname, protocol = "https", port, path: basePath } = await context.endpoint(); + const headers = (0, smithy_client_1.map)({}, isSerializableHeaderValue, { + "x-amz-sso_bearer_token": input.accessToken, + }); + const resolvedPath = `${basePath?.endsWith("/") ? basePath.slice(0, -1) : basePath || ""}` + "/logout"; + let body; + return new protocol_http_1.HttpRequest({ + protocol, + hostname, + port, + method: "POST", + headers, + path: resolvedPath, + body, + }); +}; +exports.se_LogoutCommand = se_LogoutCommand; +const de_GetRoleCredentialsCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_GetRoleCredentialsCommandError(output, context); + } + const contents = (0, smithy_client_1.map)({ + $metadata: deserializeMetadata(output), + }); + const data = (0, smithy_client_1.expectNonNull)((0, smithy_client_1.expectObject)(await parseBody(output.body, context)), "body"); + const doc = (0, smithy_client_1.take)(data, { + roleCredentials: smithy_client_1._json, + }); + Object.assign(contents, doc); + return contents; +}; +exports.de_GetRoleCredentialsCommand = de_GetRoleCredentialsCommand; +const de_GetRoleCredentialsCommandError = async (output, context) => { + const parsedOutput = { + ...output, + body: await parseErrorBody(output.body, context), + }; + const errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "InvalidRequestException": + case "com.amazonaws.sso#InvalidRequestException": + throw await de_InvalidRequestExceptionRes(parsedOutput, context); + case "ResourceNotFoundException": + case "com.amazonaws.sso#ResourceNotFoundException": + throw await de_ResourceNotFoundExceptionRes(parsedOutput, context); + case "TooManyRequestsException": + case "com.amazonaws.sso#TooManyRequestsException": + throw await de_TooManyRequestsExceptionRes(parsedOutput, context); + case "UnauthorizedException": + case "com.amazonaws.sso#UnauthorizedException": + throw await de_UnauthorizedExceptionRes(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody, + 
errorCode, + }); + } +}; +const de_ListAccountRolesCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_ListAccountRolesCommandError(output, context); + } + const contents = (0, smithy_client_1.map)({ + $metadata: deserializeMetadata(output), + }); + const data = (0, smithy_client_1.expectNonNull)((0, smithy_client_1.expectObject)(await parseBody(output.body, context)), "body"); + const doc = (0, smithy_client_1.take)(data, { + nextToken: smithy_client_1.expectString, + roleList: smithy_client_1._json, + }); + Object.assign(contents, doc); + return contents; +}; +exports.de_ListAccountRolesCommand = de_ListAccountRolesCommand; +const de_ListAccountRolesCommandError = async (output, context) => { + const parsedOutput = { + ...output, + body: await parseErrorBody(output.body, context), + }; + const errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "InvalidRequestException": + case "com.amazonaws.sso#InvalidRequestException": + throw await de_InvalidRequestExceptionRes(parsedOutput, context); + case "ResourceNotFoundException": + case "com.amazonaws.sso#ResourceNotFoundException": + throw await de_ResourceNotFoundExceptionRes(parsedOutput, context); + case "TooManyRequestsException": + case "com.amazonaws.sso#TooManyRequestsException": + throw await de_TooManyRequestsExceptionRes(parsedOutput, context); + case "UnauthorizedException": + case "com.amazonaws.sso#UnauthorizedException": + throw await de_UnauthorizedExceptionRes(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody, + errorCode, + }); + } +}; +const de_ListAccountsCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_ListAccountsCommandError(output, context); + } + const contents = (0, smithy_client_1.map)({ + $metadata: deserializeMetadata(output), + }); + const data = (0, smithy_client_1.expectNonNull)((0, smithy_client_1.expectObject)(await parseBody(output.body, context)), "body"); + const doc = (0, smithy_client_1.take)(data, { + accountList: smithy_client_1._json, + nextToken: smithy_client_1.expectString, + }); + Object.assign(contents, doc); + return contents; +}; +exports.de_ListAccountsCommand = de_ListAccountsCommand; +const de_ListAccountsCommandError = async (output, context) => { + const parsedOutput = { + ...output, + body: await parseErrorBody(output.body, context), + }; + const errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "InvalidRequestException": + case "com.amazonaws.sso#InvalidRequestException": + throw await de_InvalidRequestExceptionRes(parsedOutput, context); + case "ResourceNotFoundException": + case "com.amazonaws.sso#ResourceNotFoundException": + throw await de_ResourceNotFoundExceptionRes(parsedOutput, context); + case "TooManyRequestsException": + case "com.amazonaws.sso#TooManyRequestsException": + throw await de_TooManyRequestsExceptionRes(parsedOutput, context); + case "UnauthorizedException": + case "com.amazonaws.sso#UnauthorizedException": + throw await de_UnauthorizedExceptionRes(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody, + errorCode, + }); + } +}; +const de_LogoutCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_LogoutCommandError(output, context); + } + const contents 
= (0, smithy_client_1.map)({ + $metadata: deserializeMetadata(output), + }); + await (0, smithy_client_1.collectBody)(output.body, context); + return contents; +}; +exports.de_LogoutCommand = de_LogoutCommand; +const de_LogoutCommandError = async (output, context) => { + const parsedOutput = { + ...output, + body: await parseErrorBody(output.body, context), + }; + const errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "InvalidRequestException": + case "com.amazonaws.sso#InvalidRequestException": + throw await de_InvalidRequestExceptionRes(parsedOutput, context); + case "TooManyRequestsException": + case "com.amazonaws.sso#TooManyRequestsException": + throw await de_TooManyRequestsExceptionRes(parsedOutput, context); + case "UnauthorizedException": + case "com.amazonaws.sso#UnauthorizedException": + throw await de_UnauthorizedExceptionRes(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody, + errorCode, + }); + } +}; +const throwDefaultError = (0, smithy_client_1.withBaseException)(SSOServiceException_1.SSOServiceException); +const de_InvalidRequestExceptionRes = async (parsedOutput, context) => { + const contents = (0, smithy_client_1.map)({}); + const data = parsedOutput.body; + const doc = (0, smithy_client_1.take)(data, { + message: smithy_client_1.expectString, + }); + Object.assign(contents, doc); + const exception = new models_0_1.InvalidRequestException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return (0, smithy_client_1.decorateServiceException)(exception, parsedOutput.body); +}; +const de_ResourceNotFoundExceptionRes = async (parsedOutput, context) => { + const contents = (0, smithy_client_1.map)({}); + const data = parsedOutput.body; + const doc = (0, smithy_client_1.take)(data, { + message: smithy_client_1.expectString, + }); + Object.assign(contents, doc); + const exception = new models_0_1.ResourceNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return (0, smithy_client_1.decorateServiceException)(exception, parsedOutput.body); +}; +const de_TooManyRequestsExceptionRes = async (parsedOutput, context) => { + const contents = (0, smithy_client_1.map)({}); + const data = parsedOutput.body; + const doc = (0, smithy_client_1.take)(data, { + message: smithy_client_1.expectString, + }); + Object.assign(contents, doc); + const exception = new models_0_1.TooManyRequestsException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return (0, smithy_client_1.decorateServiceException)(exception, parsedOutput.body); +}; +const de_UnauthorizedExceptionRes = async (parsedOutput, context) => { + const contents = (0, smithy_client_1.map)({}); + const data = parsedOutput.body; + const doc = (0, smithy_client_1.take)(data, { + message: smithy_client_1.expectString, + }); + Object.assign(contents, doc); + const exception = new models_0_1.UnauthorizedException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return (0, smithy_client_1.decorateServiceException)(exception, parsedOutput.body); +}; +const deserializeMetadata = (output) => ({ + httpStatusCode: output.statusCode, + requestId: output.headers["x-amzn-requestid"] ?? output.headers["x-amzn-request-id"] ?? 
output.headers["x-amz-request-id"], + extendedRequestId: output.headers["x-amz-id-2"], + cfId: output.headers["x-amz-cf-id"], +}); +const collectBodyString = (streamBody, context) => (0, smithy_client_1.collectBody)(streamBody, context).then((body) => context.utf8Encoder(body)); +const isSerializableHeaderValue = (value) => value !== undefined && + value !== null && + value !== "" && + (!Object.getOwnPropertyNames(value).includes("length") || value.length != 0) && + (!Object.getOwnPropertyNames(value).includes("size") || value.size != 0); +const parseBody = (streamBody, context) => collectBodyString(streamBody, context).then((encoded) => { + if (encoded.length) { + return JSON.parse(encoded); + } + return {}; +}); +const parseErrorBody = async (errorBody, context) => { + const value = await parseBody(errorBody, context); + value.message = value.message ?? value.Message; + return value; +}; +const loadRestJsonErrorCode = (output, data) => { + const findKey = (object, key) => Object.keys(object).find((k) => k.toLowerCase() === key.toLowerCase()); + const sanitizeErrorCode = (rawValue) => { + let cleanValue = rawValue; + if (typeof cleanValue === "number") { + cleanValue = cleanValue.toString(); + } + if (cleanValue.indexOf(",") >= 0) { + cleanValue = cleanValue.split(",")[0]; + } + if (cleanValue.indexOf(":") >= 0) { + cleanValue = cleanValue.split(":")[0]; + } + if (cleanValue.indexOf("#") >= 0) { + cleanValue = cleanValue.split("#")[1]; + } + return cleanValue; + }; + const headerKey = findKey(output.headers, "x-amzn-errortype"); + if (headerKey !== undefined) { + return sanitizeErrorCode(output.headers[headerKey]); + } + if (data.code !== undefined) { + return sanitizeErrorCode(data.code); + } + if (data["__type"] !== undefined) { + return sanitizeErrorCode(data["__type"]); + } +}; + + +/***/ }), + +/***/ 9756: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getRuntimeConfig = void 0; +const tslib_1 = __nccwpck_require__(4351); +const package_json_1 = tslib_1.__importDefault(__nccwpck_require__(1092)); +const util_user_agent_node_1 = __nccwpck_require__(8095); +const config_resolver_1 = __nccwpck_require__(3098); +const hash_node_1 = __nccwpck_require__(3081); +const middleware_retry_1 = __nccwpck_require__(6039); +const node_config_provider_1 = __nccwpck_require__(3461); +const node_http_handler_1 = __nccwpck_require__(258); +const util_body_length_node_1 = __nccwpck_require__(8075); +const util_retry_1 = __nccwpck_require__(4902); +const runtimeConfig_shared_1 = __nccwpck_require__(4809); +const smithy_client_1 = __nccwpck_require__(3570); +const util_defaults_mode_node_1 = __nccwpck_require__(2429); +const smithy_client_2 = __nccwpck_require__(3570); +const getRuntimeConfig = (config) => { + (0, smithy_client_2.emitWarningIfUnsupportedVersion)(process.version); + const defaultsMode = (0, util_defaults_mode_node_1.resolveDefaultsModeConfig)(config); + const defaultConfigProvider = () => defaultsMode().then(smithy_client_1.loadConfigsForDefaultMode); + const clientSharedValues = (0, runtimeConfig_shared_1.getRuntimeConfig)(config); + return { + ...clientSharedValues, + ...config, + runtime: "node", + defaultsMode, + bodyLengthChecker: config?.bodyLengthChecker ?? util_body_length_node_1.calculateBodyLength, + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? 
+ (0, util_user_agent_node_1.defaultUserAgent)({ serviceId: clientSharedValues.serviceId, clientVersion: package_json_1.default.version }), + maxAttempts: config?.maxAttempts ?? (0, node_config_provider_1.loadConfig)(middleware_retry_1.NODE_MAX_ATTEMPT_CONFIG_OPTIONS), + region: config?.region ?? (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_REGION_CONFIG_OPTIONS, config_resolver_1.NODE_REGION_CONFIG_FILE_OPTIONS), + requestHandler: config?.requestHandler ?? new node_http_handler_1.NodeHttpHandler(defaultConfigProvider), + retryMode: config?.retryMode ?? + (0, node_config_provider_1.loadConfig)({ + ...middleware_retry_1.NODE_RETRY_MODE_CONFIG_OPTIONS, + default: async () => (await defaultConfigProvider()).retryMode || util_retry_1.DEFAULT_RETRY_MODE, + }), + sha256: config?.sha256 ?? hash_node_1.Hash.bind(null, "sha256"), + streamCollector: config?.streamCollector ?? node_http_handler_1.streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS), + useFipsEndpoint: config?.useFipsEndpoint ?? (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS), + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; + + +/***/ }), + +/***/ 4809: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getRuntimeConfig = void 0; +const smithy_client_1 = __nccwpck_require__(3570); +const url_parser_1 = __nccwpck_require__(4681); +const util_base64_1 = __nccwpck_require__(5600); +const util_utf8_1 = __nccwpck_require__(1895); +const endpointResolver_1 = __nccwpck_require__(898); +const getRuntimeConfig = (config) => ({ + apiVersion: "2019-06-10", + base64Decoder: config?.base64Decoder ?? util_base64_1.fromBase64, + base64Encoder: config?.base64Encoder ?? util_base64_1.toBase64, + disableHostPrefix: config?.disableHostPrefix ?? false, + endpointProvider: config?.endpointProvider ?? endpointResolver_1.defaultEndpointResolver, + logger: config?.logger ?? new smithy_client_1.NoOpLogger(), + serviceId: config?.serviceId ?? "SSO", + urlParser: config?.urlParser ?? url_parser_1.parseUrl, + utf8Decoder: config?.utf8Decoder ?? util_utf8_1.fromUtf8, + utf8Encoder: config?.utf8Encoder ?? 
util_utf8_1.toUtf8, +}); +exports.getRuntimeConfig = getRuntimeConfig; + + +/***/ }), + +/***/ 7588: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.STS = void 0; +const smithy_client_1 = __nccwpck_require__(3570); +const AssumeRoleCommand_1 = __nccwpck_require__(9802); +const AssumeRoleWithSAMLCommand_1 = __nccwpck_require__(2865); +const AssumeRoleWithWebIdentityCommand_1 = __nccwpck_require__(7451); +const DecodeAuthorizationMessageCommand_1 = __nccwpck_require__(4150); +const GetAccessKeyInfoCommand_1 = __nccwpck_require__(9804); +const GetCallerIdentityCommand_1 = __nccwpck_require__(4278); +const GetFederationTokenCommand_1 = __nccwpck_require__(7552); +const GetSessionTokenCommand_1 = __nccwpck_require__(3285); +const STSClient_1 = __nccwpck_require__(4195); +const commands = { + AssumeRoleCommand: AssumeRoleCommand_1.AssumeRoleCommand, + AssumeRoleWithSAMLCommand: AssumeRoleWithSAMLCommand_1.AssumeRoleWithSAMLCommand, + AssumeRoleWithWebIdentityCommand: AssumeRoleWithWebIdentityCommand_1.AssumeRoleWithWebIdentityCommand, + DecodeAuthorizationMessageCommand: DecodeAuthorizationMessageCommand_1.DecodeAuthorizationMessageCommand, + GetAccessKeyInfoCommand: GetAccessKeyInfoCommand_1.GetAccessKeyInfoCommand, + GetCallerIdentityCommand: GetCallerIdentityCommand_1.GetCallerIdentityCommand, + GetFederationTokenCommand: GetFederationTokenCommand_1.GetFederationTokenCommand, + GetSessionTokenCommand: GetSessionTokenCommand_1.GetSessionTokenCommand, +}; +class STS extends STSClient_1.STSClient { +} +exports.STS = STS; +(0, smithy_client_1.createAggregatedClient)(commands, STS); + + +/***/ }), + +/***/ 4195: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.STSClient = exports.__Client = void 0; +const middleware_host_header_1 = __nccwpck_require__(2545); +const middleware_logger_1 = __nccwpck_require__(14); +const middleware_recursion_detection_1 = __nccwpck_require__(5525); +const middleware_sdk_sts_1 = __nccwpck_require__(5959); +const middleware_user_agent_1 = __nccwpck_require__(4688); +const config_resolver_1 = __nccwpck_require__(3098); +const middleware_content_length_1 = __nccwpck_require__(2800); +const middleware_endpoint_1 = __nccwpck_require__(2918); +const middleware_retry_1 = __nccwpck_require__(6039); +const smithy_client_1 = __nccwpck_require__(3570); +Object.defineProperty(exports, "__Client", ({ enumerable: true, get: function () { return smithy_client_1.Client; } })); +const EndpointParameters_1 = __nccwpck_require__(510); +const runtimeConfig_1 = __nccwpck_require__(3405); +class STSClient extends smithy_client_1.Client { + constructor(...[configuration]) { + const _config_0 = (0, runtimeConfig_1.getRuntimeConfig)(configuration || {}); + const _config_1 = (0, EndpointParameters_1.resolveClientEndpointParameters)(_config_0); + const _config_2 = (0, config_resolver_1.resolveRegionConfig)(_config_1); + const _config_3 = (0, middleware_endpoint_1.resolveEndpointConfig)(_config_2); + const _config_4 = (0, middleware_retry_1.resolveRetryConfig)(_config_3); + const _config_5 = (0, middleware_host_header_1.resolveHostHeaderConfig)(_config_4); + const _config_6 = (0, middleware_sdk_sts_1.resolveStsAuthConfig)(_config_5, { stsClientCtor: STSClient }); + const _config_7 = (0, middleware_user_agent_1.resolveUserAgentConfig)(_config_6); + super(_config_7); + 
this.config = _config_7; + this.middlewareStack.use((0, middleware_retry_1.getRetryPlugin)(this.config)); + this.middlewareStack.use((0, middleware_content_length_1.getContentLengthPlugin)(this.config)); + this.middlewareStack.use((0, middleware_host_header_1.getHostHeaderPlugin)(this.config)); + this.middlewareStack.use((0, middleware_logger_1.getLoggerPlugin)(this.config)); + this.middlewareStack.use((0, middleware_recursion_detection_1.getRecursionDetectionPlugin)(this.config)); + this.middlewareStack.use((0, middleware_user_agent_1.getUserAgentPlugin)(this.config)); + } + destroy() { + super.destroy(); + } +} +exports.STSClient = STSClient; + + +/***/ }), + +/***/ 9802: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AssumeRoleCommand = exports.$Command = void 0; +const middleware_signing_1 = __nccwpck_require__(4935); +const middleware_endpoint_1 = __nccwpck_require__(2918); +const middleware_serde_1 = __nccwpck_require__(1238); +const smithy_client_1 = __nccwpck_require__(3570); +Object.defineProperty(exports, "$Command", ({ enumerable: true, get: function () { return smithy_client_1.Command; } })); +const models_0_1 = __nccwpck_require__(1780); +const Aws_query_1 = __nccwpck_require__(740); +class AssumeRoleCommand extends smithy_client_1.Command { + static getEndpointParameterInstructions() { + return { + UseGlobalEndpoint: { type: "builtInParams", name: "useGlobalEndpoint" }, + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" }, + }; + } + constructor(input) { + super(); + this.input = input; + } + resolveMiddleware(clientStack, configuration, options) { + this.middlewareStack.use((0, middleware_serde_1.getSerdePlugin)(configuration, this.serialize, this.deserialize)); + this.middlewareStack.use((0, middleware_endpoint_1.getEndpointPlugin)(configuration, AssumeRoleCommand.getEndpointParameterInstructions())); + this.middlewareStack.use((0, middleware_signing_1.getAwsAuthPlugin)(configuration)); + const stack = clientStack.concat(this.middlewareStack); + const { logger } = configuration; + const clientName = "STSClient"; + const commandName = "AssumeRoleCommand"; + const handlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: (_) => _, + outputFilterSensitiveLog: models_0_1.AssumeRoleResponseFilterSensitiveLog, + }; + const { requestHandler } = configuration; + return stack.resolve((request) => requestHandler.handle(request.request, options || {}), handlerExecutionContext); + } + serialize(input, context) { + return (0, Aws_query_1.se_AssumeRoleCommand)(input, context); + } + deserialize(output, context) { + return (0, Aws_query_1.de_AssumeRoleCommand)(output, context); + } +} +exports.AssumeRoleCommand = AssumeRoleCommand; + + +/***/ }), + +/***/ 2865: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AssumeRoleWithSAMLCommand = exports.$Command = void 0; +const middleware_endpoint_1 = __nccwpck_require__(2918); +const middleware_serde_1 = __nccwpck_require__(1238); +const smithy_client_1 = __nccwpck_require__(3570); +Object.defineProperty(exports, "$Command", ({ enumerable: true, get: function () { return smithy_client_1.Command; 
} })); +const models_0_1 = __nccwpck_require__(1780); +const Aws_query_1 = __nccwpck_require__(740); +class AssumeRoleWithSAMLCommand extends smithy_client_1.Command { + static getEndpointParameterInstructions() { + return { + UseGlobalEndpoint: { type: "builtInParams", name: "useGlobalEndpoint" }, + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" }, + }; + } + constructor(input) { + super(); + this.input = input; + } + resolveMiddleware(clientStack, configuration, options) { + this.middlewareStack.use((0, middleware_serde_1.getSerdePlugin)(configuration, this.serialize, this.deserialize)); + this.middlewareStack.use((0, middleware_endpoint_1.getEndpointPlugin)(configuration, AssumeRoleWithSAMLCommand.getEndpointParameterInstructions())); + const stack = clientStack.concat(this.middlewareStack); + const { logger } = configuration; + const clientName = "STSClient"; + const commandName = "AssumeRoleWithSAMLCommand"; + const handlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: models_0_1.AssumeRoleWithSAMLRequestFilterSensitiveLog, + outputFilterSensitiveLog: models_0_1.AssumeRoleWithSAMLResponseFilterSensitiveLog, + }; + const { requestHandler } = configuration; + return stack.resolve((request) => requestHandler.handle(request.request, options || {}), handlerExecutionContext); + } + serialize(input, context) { + return (0, Aws_query_1.se_AssumeRoleWithSAMLCommand)(input, context); + } + deserialize(output, context) { + return (0, Aws_query_1.de_AssumeRoleWithSAMLCommand)(output, context); + } +} +exports.AssumeRoleWithSAMLCommand = AssumeRoleWithSAMLCommand; + + +/***/ }), + +/***/ 7451: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AssumeRoleWithWebIdentityCommand = exports.$Command = void 0; +const middleware_endpoint_1 = __nccwpck_require__(2918); +const middleware_serde_1 = __nccwpck_require__(1238); +const smithy_client_1 = __nccwpck_require__(3570); +Object.defineProperty(exports, "$Command", ({ enumerable: true, get: function () { return smithy_client_1.Command; } })); +const models_0_1 = __nccwpck_require__(1780); +const Aws_query_1 = __nccwpck_require__(740); +class AssumeRoleWithWebIdentityCommand extends smithy_client_1.Command { + static getEndpointParameterInstructions() { + return { + UseGlobalEndpoint: { type: "builtInParams", name: "useGlobalEndpoint" }, + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" }, + }; + } + constructor(input) { + super(); + this.input = input; + } + resolveMiddleware(clientStack, configuration, options) { + this.middlewareStack.use((0, middleware_serde_1.getSerdePlugin)(configuration, this.serialize, this.deserialize)); + this.middlewareStack.use((0, middleware_endpoint_1.getEndpointPlugin)(configuration, AssumeRoleWithWebIdentityCommand.getEndpointParameterInstructions())); + const stack = clientStack.concat(this.middlewareStack); + const { logger } = configuration; + const clientName = "STSClient"; + const commandName = "AssumeRoleWithWebIdentityCommand"; + const handlerExecutionContext = { + logger, + clientName, 
+ commandName, + inputFilterSensitiveLog: models_0_1.AssumeRoleWithWebIdentityRequestFilterSensitiveLog, + outputFilterSensitiveLog: models_0_1.AssumeRoleWithWebIdentityResponseFilterSensitiveLog, + }; + const { requestHandler } = configuration; + return stack.resolve((request) => requestHandler.handle(request.request, options || {}), handlerExecutionContext); + } + serialize(input, context) { + return (0, Aws_query_1.se_AssumeRoleWithWebIdentityCommand)(input, context); + } + deserialize(output, context) { + return (0, Aws_query_1.de_AssumeRoleWithWebIdentityCommand)(output, context); + } +} +exports.AssumeRoleWithWebIdentityCommand = AssumeRoleWithWebIdentityCommand; + + +/***/ }), + +/***/ 4150: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.DecodeAuthorizationMessageCommand = exports.$Command = void 0; +const middleware_signing_1 = __nccwpck_require__(4935); +const middleware_endpoint_1 = __nccwpck_require__(2918); +const middleware_serde_1 = __nccwpck_require__(1238); +const smithy_client_1 = __nccwpck_require__(3570); +Object.defineProperty(exports, "$Command", ({ enumerable: true, get: function () { return smithy_client_1.Command; } })); +const Aws_query_1 = __nccwpck_require__(740); +class DecodeAuthorizationMessageCommand extends smithy_client_1.Command { + static getEndpointParameterInstructions() { + return { + UseGlobalEndpoint: { type: "builtInParams", name: "useGlobalEndpoint" }, + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" }, + }; + } + constructor(input) { + super(); + this.input = input; + } + resolveMiddleware(clientStack, configuration, options) { + this.middlewareStack.use((0, middleware_serde_1.getSerdePlugin)(configuration, this.serialize, this.deserialize)); + this.middlewareStack.use((0, middleware_endpoint_1.getEndpointPlugin)(configuration, DecodeAuthorizationMessageCommand.getEndpointParameterInstructions())); + this.middlewareStack.use((0, middleware_signing_1.getAwsAuthPlugin)(configuration)); + const stack = clientStack.concat(this.middlewareStack); + const { logger } = configuration; + const clientName = "STSClient"; + const commandName = "DecodeAuthorizationMessageCommand"; + const handlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: (_) => _, + outputFilterSensitiveLog: (_) => _, + }; + const { requestHandler } = configuration; + return stack.resolve((request) => requestHandler.handle(request.request, options || {}), handlerExecutionContext); + } + serialize(input, context) { + return (0, Aws_query_1.se_DecodeAuthorizationMessageCommand)(input, context); + } + deserialize(output, context) { + return (0, Aws_query_1.de_DecodeAuthorizationMessageCommand)(output, context); + } +} +exports.DecodeAuthorizationMessageCommand = DecodeAuthorizationMessageCommand; + + +/***/ }), + +/***/ 9804: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GetAccessKeyInfoCommand = exports.$Command = void 0; +const middleware_signing_1 = __nccwpck_require__(4935); +const middleware_endpoint_1 = __nccwpck_require__(2918); +const middleware_serde_1 = __nccwpck_require__(1238); +const smithy_client_1 = 
__nccwpck_require__(3570); +Object.defineProperty(exports, "$Command", ({ enumerable: true, get: function () { return smithy_client_1.Command; } })); +const Aws_query_1 = __nccwpck_require__(740); +class GetAccessKeyInfoCommand extends smithy_client_1.Command { + static getEndpointParameterInstructions() { + return { + UseGlobalEndpoint: { type: "builtInParams", name: "useGlobalEndpoint" }, + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" }, + }; + } + constructor(input) { + super(); + this.input = input; + } + resolveMiddleware(clientStack, configuration, options) { + this.middlewareStack.use((0, middleware_serde_1.getSerdePlugin)(configuration, this.serialize, this.deserialize)); + this.middlewareStack.use((0, middleware_endpoint_1.getEndpointPlugin)(configuration, GetAccessKeyInfoCommand.getEndpointParameterInstructions())); + this.middlewareStack.use((0, middleware_signing_1.getAwsAuthPlugin)(configuration)); + const stack = clientStack.concat(this.middlewareStack); + const { logger } = configuration; + const clientName = "STSClient"; + const commandName = "GetAccessKeyInfoCommand"; + const handlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: (_) => _, + outputFilterSensitiveLog: (_) => _, + }; + const { requestHandler } = configuration; + return stack.resolve((request) => requestHandler.handle(request.request, options || {}), handlerExecutionContext); + } + serialize(input, context) { + return (0, Aws_query_1.se_GetAccessKeyInfoCommand)(input, context); + } + deserialize(output, context) { + return (0, Aws_query_1.de_GetAccessKeyInfoCommand)(output, context); + } +} +exports.GetAccessKeyInfoCommand = GetAccessKeyInfoCommand; + + +/***/ }), + +/***/ 4278: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GetCallerIdentityCommand = exports.$Command = void 0; +const middleware_signing_1 = __nccwpck_require__(4935); +const middleware_endpoint_1 = __nccwpck_require__(2918); +const middleware_serde_1 = __nccwpck_require__(1238); +const smithy_client_1 = __nccwpck_require__(3570); +Object.defineProperty(exports, "$Command", ({ enumerable: true, get: function () { return smithy_client_1.Command; } })); +const Aws_query_1 = __nccwpck_require__(740); +class GetCallerIdentityCommand extends smithy_client_1.Command { + static getEndpointParameterInstructions() { + return { + UseGlobalEndpoint: { type: "builtInParams", name: "useGlobalEndpoint" }, + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" }, + }; + } + constructor(input) { + super(); + this.input = input; + } + resolveMiddleware(clientStack, configuration, options) { + this.middlewareStack.use((0, middleware_serde_1.getSerdePlugin)(configuration, this.serialize, this.deserialize)); + this.middlewareStack.use((0, middleware_endpoint_1.getEndpointPlugin)(configuration, GetCallerIdentityCommand.getEndpointParameterInstructions())); + this.middlewareStack.use((0, middleware_signing_1.getAwsAuthPlugin)(configuration)); + const stack = clientStack.concat(this.middlewareStack); + const { logger } = configuration; + 
const clientName = "STSClient"; + const commandName = "GetCallerIdentityCommand"; + const handlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: (_) => _, + outputFilterSensitiveLog: (_) => _, + }; + const { requestHandler } = configuration; + return stack.resolve((request) => requestHandler.handle(request.request, options || {}), handlerExecutionContext); + } + serialize(input, context) { + return (0, Aws_query_1.se_GetCallerIdentityCommand)(input, context); + } + deserialize(output, context) { + return (0, Aws_query_1.de_GetCallerIdentityCommand)(output, context); + } +} +exports.GetCallerIdentityCommand = GetCallerIdentityCommand; + + +/***/ }), + +/***/ 7552: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GetFederationTokenCommand = exports.$Command = void 0; +const middleware_signing_1 = __nccwpck_require__(4935); +const middleware_endpoint_1 = __nccwpck_require__(2918); +const middleware_serde_1 = __nccwpck_require__(1238); +const smithy_client_1 = __nccwpck_require__(3570); +Object.defineProperty(exports, "$Command", ({ enumerable: true, get: function () { return smithy_client_1.Command; } })); +const models_0_1 = __nccwpck_require__(1780); +const Aws_query_1 = __nccwpck_require__(740); +class GetFederationTokenCommand extends smithy_client_1.Command { + static getEndpointParameterInstructions() { + return { + UseGlobalEndpoint: { type: "builtInParams", name: "useGlobalEndpoint" }, + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" }, + }; + } + constructor(input) { + super(); + this.input = input; + } + resolveMiddleware(clientStack, configuration, options) { + this.middlewareStack.use((0, middleware_serde_1.getSerdePlugin)(configuration, this.serialize, this.deserialize)); + this.middlewareStack.use((0, middleware_endpoint_1.getEndpointPlugin)(configuration, GetFederationTokenCommand.getEndpointParameterInstructions())); + this.middlewareStack.use((0, middleware_signing_1.getAwsAuthPlugin)(configuration)); + const stack = clientStack.concat(this.middlewareStack); + const { logger } = configuration; + const clientName = "STSClient"; + const commandName = "GetFederationTokenCommand"; + const handlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: (_) => _, + outputFilterSensitiveLog: models_0_1.GetFederationTokenResponseFilterSensitiveLog, + }; + const { requestHandler } = configuration; + return stack.resolve((request) => requestHandler.handle(request.request, options || {}), handlerExecutionContext); + } + serialize(input, context) { + return (0, Aws_query_1.se_GetFederationTokenCommand)(input, context); + } + deserialize(output, context) { + return (0, Aws_query_1.de_GetFederationTokenCommand)(output, context); + } +} +exports.GetFederationTokenCommand = GetFederationTokenCommand; + + +/***/ }), + +/***/ 3285: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GetSessionTokenCommand = exports.$Command = void 0; +const middleware_signing_1 = __nccwpck_require__(4935); +const middleware_endpoint_1 = __nccwpck_require__(2918); +const middleware_serde_1 = __nccwpck_require__(1238); +const 
smithy_client_1 = __nccwpck_require__(3570); +Object.defineProperty(exports, "$Command", ({ enumerable: true, get: function () { return smithy_client_1.Command; } })); +const models_0_1 = __nccwpck_require__(1780); +const Aws_query_1 = __nccwpck_require__(740); +class GetSessionTokenCommand extends smithy_client_1.Command { + static getEndpointParameterInstructions() { + return { + UseGlobalEndpoint: { type: "builtInParams", name: "useGlobalEndpoint" }, + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" }, + }; + } + constructor(input) { + super(); + this.input = input; + } + resolveMiddleware(clientStack, configuration, options) { + this.middlewareStack.use((0, middleware_serde_1.getSerdePlugin)(configuration, this.serialize, this.deserialize)); + this.middlewareStack.use((0, middleware_endpoint_1.getEndpointPlugin)(configuration, GetSessionTokenCommand.getEndpointParameterInstructions())); + this.middlewareStack.use((0, middleware_signing_1.getAwsAuthPlugin)(configuration)); + const stack = clientStack.concat(this.middlewareStack); + const { logger } = configuration; + const clientName = "STSClient"; + const commandName = "GetSessionTokenCommand"; + const handlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: (_) => _, + outputFilterSensitiveLog: models_0_1.GetSessionTokenResponseFilterSensitiveLog, + }; + const { requestHandler } = configuration; + return stack.resolve((request) => requestHandler.handle(request.request, options || {}), handlerExecutionContext); + } + serialize(input, context) { + return (0, Aws_query_1.se_GetSessionTokenCommand)(input, context); + } + deserialize(output, context) { + return (0, Aws_query_1.de_GetSessionTokenCommand)(output, context); + } +} +exports.GetSessionTokenCommand = GetSessionTokenCommand; + + +/***/ }), + +/***/ 5716: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(9802), exports); +tslib_1.__exportStar(__nccwpck_require__(2865), exports); +tslib_1.__exportStar(__nccwpck_require__(7451), exports); +tslib_1.__exportStar(__nccwpck_require__(4150), exports); +tslib_1.__exportStar(__nccwpck_require__(9804), exports); +tslib_1.__exportStar(__nccwpck_require__(4278), exports); +tslib_1.__exportStar(__nccwpck_require__(7552), exports); +tslib_1.__exportStar(__nccwpck_require__(3285), exports); + + +/***/ }), + +/***/ 8028: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.decorateDefaultCredentialProvider = exports.getDefaultRoleAssumerWithWebIdentity = exports.getDefaultRoleAssumer = void 0; +const defaultStsRoleAssumers_1 = __nccwpck_require__(48); +const STSClient_1 = __nccwpck_require__(4195); +const getCustomizableStsClientCtor = (baseCtor, customizations) => { + if (!customizations) + return baseCtor; + else + return class CustomizableSTSClient extends baseCtor { + constructor(config) { + super(config); + for (const customization of customizations) { + this.middlewareStack.use(customization); + } + } + }; +}; +const getDefaultRoleAssumer = (stsOptions = {}, stsPlugins) => (0, 
defaultStsRoleAssumers_1.getDefaultRoleAssumer)(stsOptions, getCustomizableStsClientCtor(STSClient_1.STSClient, stsPlugins)); +exports.getDefaultRoleAssumer = getDefaultRoleAssumer; +const getDefaultRoleAssumerWithWebIdentity = (stsOptions = {}, stsPlugins) => (0, defaultStsRoleAssumers_1.getDefaultRoleAssumerWithWebIdentity)(stsOptions, getCustomizableStsClientCtor(STSClient_1.STSClient, stsPlugins)); +exports.getDefaultRoleAssumerWithWebIdentity = getDefaultRoleAssumerWithWebIdentity; +const decorateDefaultCredentialProvider = (provider) => (input) => provider({ + roleAssumer: (0, exports.getDefaultRoleAssumer)(input), + roleAssumerWithWebIdentity: (0, exports.getDefaultRoleAssumerWithWebIdentity)(input), + ...input, +}); +exports.decorateDefaultCredentialProvider = decorateDefaultCredentialProvider; + + +/***/ }), + +/***/ 48: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.decorateDefaultCredentialProvider = exports.getDefaultRoleAssumerWithWebIdentity = exports.getDefaultRoleAssumer = void 0; +const AssumeRoleCommand_1 = __nccwpck_require__(9802); +const AssumeRoleWithWebIdentityCommand_1 = __nccwpck_require__(7451); +const ASSUME_ROLE_DEFAULT_REGION = "us-east-1"; +const decorateDefaultRegion = (region) => { + if (typeof region !== "function") { + return region === undefined ? ASSUME_ROLE_DEFAULT_REGION : region; + } + return async () => { + try { + return await region(); + } + catch (e) { + return ASSUME_ROLE_DEFAULT_REGION; + } + }; +}; +const getDefaultRoleAssumer = (stsOptions, stsClientCtor) => { + let stsClient; + let closureSourceCreds; + return async (sourceCreds, params) => { + closureSourceCreds = sourceCreds; + if (!stsClient) { + const { logger, region, requestHandler } = stsOptions; + stsClient = new stsClientCtor({ + logger, + credentialDefaultProvider: () => async () => closureSourceCreds, + region: decorateDefaultRegion(region || stsOptions.region), + ...(requestHandler ? { requestHandler } : {}), + }); + } + const { Credentials } = await stsClient.send(new AssumeRoleCommand_1.AssumeRoleCommand(params)); + if (!Credentials || !Credentials.AccessKeyId || !Credentials.SecretAccessKey) { + throw new Error(`Invalid response from STS.assumeRole call with role ${params.RoleArn}`); + } + return { + accessKeyId: Credentials.AccessKeyId, + secretAccessKey: Credentials.SecretAccessKey, + sessionToken: Credentials.SessionToken, + expiration: Credentials.Expiration, + }; + }; +}; +exports.getDefaultRoleAssumer = getDefaultRoleAssumer; +const getDefaultRoleAssumerWithWebIdentity = (stsOptions, stsClientCtor) => { + let stsClient; + return async (params) => { + if (!stsClient) { + const { logger, region, requestHandler } = stsOptions; + stsClient = new stsClientCtor({ + logger, + region: decorateDefaultRegion(region || stsOptions.region), + ...(requestHandler ? 
{ requestHandler } : {}), + }); + } + const { Credentials } = await stsClient.send(new AssumeRoleWithWebIdentityCommand_1.AssumeRoleWithWebIdentityCommand(params)); + if (!Credentials || !Credentials.AccessKeyId || !Credentials.SecretAccessKey) { + throw new Error(`Invalid response from STS.assumeRoleWithWebIdentity call with role ${params.RoleArn}`); + } + return { + accessKeyId: Credentials.AccessKeyId, + secretAccessKey: Credentials.SecretAccessKey, + sessionToken: Credentials.SessionToken, + expiration: Credentials.Expiration, + }; + }; +}; +exports.getDefaultRoleAssumerWithWebIdentity = getDefaultRoleAssumerWithWebIdentity; +const decorateDefaultCredentialProvider = (provider) => (input) => provider({ + roleAssumer: (0, exports.getDefaultRoleAssumer)(input, input.stsClientCtor), + roleAssumerWithWebIdentity: (0, exports.getDefaultRoleAssumerWithWebIdentity)(input, input.stsClientCtor), + ...input, +}); +exports.decorateDefaultCredentialProvider = decorateDefaultCredentialProvider; + + +/***/ }), + +/***/ 510: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveClientEndpointParameters = void 0; +const resolveClientEndpointParameters = (options) => { + return { + ...options, + useDualstackEndpoint: options.useDualstackEndpoint ?? false, + useFipsEndpoint: options.useFipsEndpoint ?? false, + useGlobalEndpoint: options.useGlobalEndpoint ?? false, + defaultSigningName: "sts", + }; +}; +exports.resolveClientEndpointParameters = resolveClientEndpointParameters; + + +/***/ }), + +/***/ 1203: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.defaultEndpointResolver = void 0; +const util_endpoints_1 = __nccwpck_require__(3350); +const ruleset_1 = __nccwpck_require__(6882); +const defaultEndpointResolver = (endpointParams, context = {}) => { + return (0, util_endpoints_1.resolveEndpoint)(ruleset_1.ruleSet, { + endpointParams: endpointParams, + logger: context.logger, + }); +}; +exports.defaultEndpointResolver = defaultEndpointResolver; + + +/***/ }), + +/***/ 6882: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ruleSet = void 0; +const F = "required", G = "type", H = "fn", I = "argv", J = "ref"; +const a = false, b = true, c = "booleanEquals", d = "tree", e = "stringEquals", f = "sigv4", g = "sts", h = "us-east-1", i = "endpoint", j = "https://sts.{Region}.{PartitionResult#dnsSuffix}", k = "error", l = "getAttr", m = { [F]: false, [G]: "String" }, n = { [F]: true, "default": false, [G]: "Boolean" }, o = { [J]: "Endpoint" }, p = { [H]: "isSet", [I]: [{ [J]: "Region" }] }, q = { [J]: "Region" }, r = { [H]: "aws.partition", [I]: [q], "assign": "PartitionResult" }, s = { [J]: "UseFIPS" }, t = { [J]: "UseDualStack" }, u = { "url": "https://sts.amazonaws.com", "properties": { "authSchemes": [{ "name": f, "signingName": g, "signingRegion": h }] }, "headers": {} }, v = {}, w = { "conditions": [{ [H]: e, [I]: [q, "aws-global"] }], [i]: u, [G]: i }, x = { [H]: c, [I]: [s, true] }, y = { [H]: c, [I]: [t, true] }, z = { [H]: c, [I]: [true, { [H]: l, [I]: [{ [J]: "PartitionResult" }, "supportsFIPS"] }] }, A = { [J]: "PartitionResult" }, B = { [H]: c, [I]: [true, { [H]: l, [I]: [A, "supportsDualStack"] }] }, C = [{ [H]: "isSet", [I]: [o] }], D = [x], E = [y]; +const _data = { version: "1.0", parameters: { 
Region: m, UseDualStack: n, UseFIPS: n, Endpoint: m, UseGlobalEndpoint: n }, rules: [{ conditions: [{ [H]: c, [I]: [{ [J]: "UseGlobalEndpoint" }, b] }, { [H]: "not", [I]: C }, p, r, { [H]: c, [I]: [s, a] }, { [H]: c, [I]: [t, a] }], [G]: d, rules: [{ conditions: [{ [H]: e, [I]: [q, "ap-northeast-1"] }], endpoint: u, [G]: i }, { conditions: [{ [H]: e, [I]: [q, "ap-south-1"] }], endpoint: u, [G]: i }, { conditions: [{ [H]: e, [I]: [q, "ap-southeast-1"] }], endpoint: u, [G]: i }, { conditions: [{ [H]: e, [I]: [q, "ap-southeast-2"] }], endpoint: u, [G]: i }, w, { conditions: [{ [H]: e, [I]: [q, "ca-central-1"] }], endpoint: u, [G]: i }, { conditions: [{ [H]: e, [I]: [q, "eu-central-1"] }], endpoint: u, [G]: i }, { conditions: [{ [H]: e, [I]: [q, "eu-north-1"] }], endpoint: u, [G]: i }, { conditions: [{ [H]: e, [I]: [q, "eu-west-1"] }], endpoint: u, [G]: i }, { conditions: [{ [H]: e, [I]: [q, "eu-west-2"] }], endpoint: u, [G]: i }, { conditions: [{ [H]: e, [I]: [q, "eu-west-3"] }], endpoint: u, [G]: i }, { conditions: [{ [H]: e, [I]: [q, "sa-east-1"] }], endpoint: u, [G]: i }, { conditions: [{ [H]: e, [I]: [q, h] }], endpoint: u, [G]: i }, { conditions: [{ [H]: e, [I]: [q, "us-east-2"] }], endpoint: u, [G]: i }, { conditions: [{ [H]: e, [I]: [q, "us-west-1"] }], endpoint: u, [G]: i }, { conditions: [{ [H]: e, [I]: [q, "us-west-2"] }], endpoint: u, [G]: i }, { endpoint: { url: j, properties: { authSchemes: [{ name: f, signingName: g, signingRegion: "{Region}" }] }, headers: v }, [G]: i }] }, { conditions: C, [G]: d, rules: [{ conditions: D, error: "Invalid Configuration: FIPS and custom endpoint are not supported", [G]: k }, { [G]: d, rules: [{ conditions: E, error: "Invalid Configuration: Dualstack and custom endpoint are not supported", [G]: k }, { endpoint: { url: o, properties: v, headers: v }, [G]: i }] }] }, { [G]: d, rules: [{ conditions: [p], [G]: d, rules: [{ conditions: [r], [G]: d, rules: [{ conditions: [x, y], [G]: d, rules: [{ conditions: [z, B], [G]: d, rules: [{ [G]: d, rules: [{ endpoint: { url: "https://sts-fips.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: v, headers: v }, [G]: i }] }] }, { error: "FIPS and DualStack are enabled, but this partition does not support one or both", [G]: k }] }, { conditions: D, [G]: d, rules: [{ conditions: [z], [G]: d, rules: [{ [G]: d, rules: [{ conditions: [{ [H]: e, [I]: ["aws-us-gov", { [H]: l, [I]: [A, "name"] }] }], endpoint: { url: "https://sts.{Region}.amazonaws.com", properties: v, headers: v }, [G]: i }, { endpoint: { url: "https://sts-fips.{Region}.{PartitionResult#dnsSuffix}", properties: v, headers: v }, [G]: i }] }] }, { error: "FIPS is enabled but this partition does not support FIPS", [G]: k }] }, { conditions: E, [G]: d, rules: [{ conditions: [B], [G]: d, rules: [{ [G]: d, rules: [{ endpoint: { url: "https://sts.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: v, headers: v }, [G]: i }] }] }, { error: "DualStack is enabled but this partition does not support DualStack", [G]: k }] }, { [G]: d, rules: [w, { endpoint: { url: j, properties: v, headers: v }, [G]: i }] }] }] }, { error: "Invalid Configuration: Missing Region", [G]: k }] }] }; +exports.ruleSet = _data; + + +/***/ }), + +/***/ 2209: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.STSServiceException = void 0; +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(4195), exports); 
+tslib_1.__exportStar(__nccwpck_require__(7588), exports); +tslib_1.__exportStar(__nccwpck_require__(5716), exports); +tslib_1.__exportStar(__nccwpck_require__(106), exports); +tslib_1.__exportStar(__nccwpck_require__(8028), exports); +var STSServiceException_1 = __nccwpck_require__(6450); +Object.defineProperty(exports, "STSServiceException", ({ enumerable: true, get: function () { return STSServiceException_1.STSServiceException; } })); + + +/***/ }), + +/***/ 6450: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.STSServiceException = exports.__ServiceException = void 0; +const smithy_client_1 = __nccwpck_require__(3570); +Object.defineProperty(exports, "__ServiceException", ({ enumerable: true, get: function () { return smithy_client_1.ServiceException; } })); +class STSServiceException extends smithy_client_1.ServiceException { + constructor(options) { + super(options); + Object.setPrototypeOf(this, STSServiceException.prototype); + } +} +exports.STSServiceException = STSServiceException; + + +/***/ }), + +/***/ 106: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(1780), exports); + + +/***/ }), + +/***/ 1780: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GetSessionTokenResponseFilterSensitiveLog = exports.GetFederationTokenResponseFilterSensitiveLog = exports.AssumeRoleWithWebIdentityResponseFilterSensitiveLog = exports.AssumeRoleWithWebIdentityRequestFilterSensitiveLog = exports.AssumeRoleWithSAMLResponseFilterSensitiveLog = exports.AssumeRoleWithSAMLRequestFilterSensitiveLog = exports.AssumeRoleResponseFilterSensitiveLog = exports.CredentialsFilterSensitiveLog = exports.InvalidAuthorizationMessageException = exports.IDPCommunicationErrorException = exports.InvalidIdentityTokenException = exports.IDPRejectedClaimException = exports.RegionDisabledException = exports.PackedPolicyTooLargeException = exports.MalformedPolicyDocumentException = exports.ExpiredTokenException = void 0; +const smithy_client_1 = __nccwpck_require__(3570); +const STSServiceException_1 = __nccwpck_require__(6450); +class ExpiredTokenException extends STSServiceException_1.STSServiceException { + constructor(opts) { + super({ + name: "ExpiredTokenException", + $fault: "client", + ...opts, + }); + this.name = "ExpiredTokenException"; + this.$fault = "client"; + Object.setPrototypeOf(this, ExpiredTokenException.prototype); + } +} +exports.ExpiredTokenException = ExpiredTokenException; +class MalformedPolicyDocumentException extends STSServiceException_1.STSServiceException { + constructor(opts) { + super({ + name: "MalformedPolicyDocumentException", + $fault: "client", + ...opts, + }); + this.name = "MalformedPolicyDocumentException"; + this.$fault = "client"; + Object.setPrototypeOf(this, MalformedPolicyDocumentException.prototype); + } +} +exports.MalformedPolicyDocumentException = MalformedPolicyDocumentException; +class PackedPolicyTooLargeException extends STSServiceException_1.STSServiceException { + constructor(opts) { + super({ + name: "PackedPolicyTooLargeException", + $fault: "client", + ...opts, + }); + this.name = "PackedPolicyTooLargeException"; + this.$fault = "client"; + 
Object.setPrototypeOf(this, PackedPolicyTooLargeException.prototype); + } +} +exports.PackedPolicyTooLargeException = PackedPolicyTooLargeException; +class RegionDisabledException extends STSServiceException_1.STSServiceException { + constructor(opts) { + super({ + name: "RegionDisabledException", + $fault: "client", + ...opts, + }); + this.name = "RegionDisabledException"; + this.$fault = "client"; + Object.setPrototypeOf(this, RegionDisabledException.prototype); + } +} +exports.RegionDisabledException = RegionDisabledException; +class IDPRejectedClaimException extends STSServiceException_1.STSServiceException { + constructor(opts) { + super({ + name: "IDPRejectedClaimException", + $fault: "client", + ...opts, + }); + this.name = "IDPRejectedClaimException"; + this.$fault = "client"; + Object.setPrototypeOf(this, IDPRejectedClaimException.prototype); + } +} +exports.IDPRejectedClaimException = IDPRejectedClaimException; +class InvalidIdentityTokenException extends STSServiceException_1.STSServiceException { + constructor(opts) { + super({ + name: "InvalidIdentityTokenException", + $fault: "client", + ...opts, + }); + this.name = "InvalidIdentityTokenException"; + this.$fault = "client"; + Object.setPrototypeOf(this, InvalidIdentityTokenException.prototype); + } +} +exports.InvalidIdentityTokenException = InvalidIdentityTokenException; +class IDPCommunicationErrorException extends STSServiceException_1.STSServiceException { + constructor(opts) { + super({ + name: "IDPCommunicationErrorException", + $fault: "client", + ...opts, + }); + this.name = "IDPCommunicationErrorException"; + this.$fault = "client"; + Object.setPrototypeOf(this, IDPCommunicationErrorException.prototype); + } +} +exports.IDPCommunicationErrorException = IDPCommunicationErrorException; +class InvalidAuthorizationMessageException extends STSServiceException_1.STSServiceException { + constructor(opts) { + super({ + name: "InvalidAuthorizationMessageException", + $fault: "client", + ...opts, + }); + this.name = "InvalidAuthorizationMessageException"; + this.$fault = "client"; + Object.setPrototypeOf(this, InvalidAuthorizationMessageException.prototype); + } +} +exports.InvalidAuthorizationMessageException = InvalidAuthorizationMessageException; +const CredentialsFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.SecretAccessKey && { SecretAccessKey: smithy_client_1.SENSITIVE_STRING }), +}); +exports.CredentialsFilterSensitiveLog = CredentialsFilterSensitiveLog; +const AssumeRoleResponseFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.Credentials && { Credentials: (0, exports.CredentialsFilterSensitiveLog)(obj.Credentials) }), +}); +exports.AssumeRoleResponseFilterSensitiveLog = AssumeRoleResponseFilterSensitiveLog; +const AssumeRoleWithSAMLRequestFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.SAMLAssertion && { SAMLAssertion: smithy_client_1.SENSITIVE_STRING }), +}); +exports.AssumeRoleWithSAMLRequestFilterSensitiveLog = AssumeRoleWithSAMLRequestFilterSensitiveLog; +const AssumeRoleWithSAMLResponseFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.Credentials && { Credentials: (0, exports.CredentialsFilterSensitiveLog)(obj.Credentials) }), +}); +exports.AssumeRoleWithSAMLResponseFilterSensitiveLog = AssumeRoleWithSAMLResponseFilterSensitiveLog; +const AssumeRoleWithWebIdentityRequestFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.WebIdentityToken && { WebIdentityToken: smithy_client_1.SENSITIVE_STRING }), +}); +exports.AssumeRoleWithWebIdentityRequestFilterSensitiveLog = 
AssumeRoleWithWebIdentityRequestFilterSensitiveLog; +const AssumeRoleWithWebIdentityResponseFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.Credentials && { Credentials: (0, exports.CredentialsFilterSensitiveLog)(obj.Credentials) }), +}); +exports.AssumeRoleWithWebIdentityResponseFilterSensitiveLog = AssumeRoleWithWebIdentityResponseFilterSensitiveLog; +const GetFederationTokenResponseFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.Credentials && { Credentials: (0, exports.CredentialsFilterSensitiveLog)(obj.Credentials) }), +}); +exports.GetFederationTokenResponseFilterSensitiveLog = GetFederationTokenResponseFilterSensitiveLog; +const GetSessionTokenResponseFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.Credentials && { Credentials: (0, exports.CredentialsFilterSensitiveLog)(obj.Credentials) }), +}); +exports.GetSessionTokenResponseFilterSensitiveLog = GetSessionTokenResponseFilterSensitiveLog; + + +/***/ }), + +/***/ 740: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.de_GetSessionTokenCommand = exports.de_GetFederationTokenCommand = exports.de_GetCallerIdentityCommand = exports.de_GetAccessKeyInfoCommand = exports.de_DecodeAuthorizationMessageCommand = exports.de_AssumeRoleWithWebIdentityCommand = exports.de_AssumeRoleWithSAMLCommand = exports.de_AssumeRoleCommand = exports.se_GetSessionTokenCommand = exports.se_GetFederationTokenCommand = exports.se_GetCallerIdentityCommand = exports.se_GetAccessKeyInfoCommand = exports.se_DecodeAuthorizationMessageCommand = exports.se_AssumeRoleWithWebIdentityCommand = exports.se_AssumeRoleWithSAMLCommand = exports.se_AssumeRoleCommand = void 0; +const protocol_http_1 = __nccwpck_require__(4418); +const smithy_client_1 = __nccwpck_require__(3570); +const fast_xml_parser_1 = __nccwpck_require__(2603); +const models_0_1 = __nccwpck_require__(1780); +const STSServiceException_1 = __nccwpck_require__(6450); +const se_AssumeRoleCommand = async (input, context) => { + const headers = SHARED_HEADERS; + let body; + body = buildFormUrlencodedString({ + ...se_AssumeRoleRequest(input, context), + Action: "AssumeRole", + Version: "2011-06-15", + }); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +exports.se_AssumeRoleCommand = se_AssumeRoleCommand; +const se_AssumeRoleWithSAMLCommand = async (input, context) => { + const headers = SHARED_HEADERS; + let body; + body = buildFormUrlencodedString({ + ...se_AssumeRoleWithSAMLRequest(input, context), + Action: "AssumeRoleWithSAML", + Version: "2011-06-15", + }); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +exports.se_AssumeRoleWithSAMLCommand = se_AssumeRoleWithSAMLCommand; +const se_AssumeRoleWithWebIdentityCommand = async (input, context) => { + const headers = SHARED_HEADERS; + let body; + body = buildFormUrlencodedString({ + ...se_AssumeRoleWithWebIdentityRequest(input, context), + Action: "AssumeRoleWithWebIdentity", + Version: "2011-06-15", + }); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +exports.se_AssumeRoleWithWebIdentityCommand = se_AssumeRoleWithWebIdentityCommand; +const se_DecodeAuthorizationMessageCommand = async (input, context) => { + const headers = SHARED_HEADERS; + let body; + body = buildFormUrlencodedString({ + ...se_DecodeAuthorizationMessageRequest(input, context), + Action: "DecodeAuthorizationMessage", + Version: "2011-06-15", + }); + return buildHttpRpcRequest(context, 
headers, "/", undefined, body); +}; +exports.se_DecodeAuthorizationMessageCommand = se_DecodeAuthorizationMessageCommand; +const se_GetAccessKeyInfoCommand = async (input, context) => { + const headers = SHARED_HEADERS; + let body; + body = buildFormUrlencodedString({ + ...se_GetAccessKeyInfoRequest(input, context), + Action: "GetAccessKeyInfo", + Version: "2011-06-15", + }); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +exports.se_GetAccessKeyInfoCommand = se_GetAccessKeyInfoCommand; +const se_GetCallerIdentityCommand = async (input, context) => { + const headers = SHARED_HEADERS; + let body; + body = buildFormUrlencodedString({ + ...se_GetCallerIdentityRequest(input, context), + Action: "GetCallerIdentity", + Version: "2011-06-15", + }); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +exports.se_GetCallerIdentityCommand = se_GetCallerIdentityCommand; +const se_GetFederationTokenCommand = async (input, context) => { + const headers = SHARED_HEADERS; + let body; + body = buildFormUrlencodedString({ + ...se_GetFederationTokenRequest(input, context), + Action: "GetFederationToken", + Version: "2011-06-15", + }); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +exports.se_GetFederationTokenCommand = se_GetFederationTokenCommand; +const se_GetSessionTokenCommand = async (input, context) => { + const headers = SHARED_HEADERS; + let body; + body = buildFormUrlencodedString({ + ...se_GetSessionTokenRequest(input, context), + Action: "GetSessionToken", + Version: "2011-06-15", + }); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +exports.se_GetSessionTokenCommand = se_GetSessionTokenCommand; +const de_AssumeRoleCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_AssumeRoleCommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_AssumeRoleResponse(data.AssumeRoleResult, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +exports.de_AssumeRoleCommand = de_AssumeRoleCommand; +const de_AssumeRoleCommandError = async (output, context) => { + const parsedOutput = { + ...output, + body: await parseErrorBody(output.body, context), + }; + const errorCode = loadQueryErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "ExpiredTokenException": + case "com.amazonaws.sts#ExpiredTokenException": + throw await de_ExpiredTokenExceptionRes(parsedOutput, context); + case "MalformedPolicyDocument": + case "com.amazonaws.sts#MalformedPolicyDocumentException": + throw await de_MalformedPolicyDocumentExceptionRes(parsedOutput, context); + case "PackedPolicyTooLarge": + case "com.amazonaws.sts#PackedPolicyTooLargeException": + throw await de_PackedPolicyTooLargeExceptionRes(parsedOutput, context); + case "RegionDisabledException": + case "com.amazonaws.sts#RegionDisabledException": + throw await de_RegionDisabledExceptionRes(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody: parsedBody.Error, + errorCode, + }); + } +}; +const de_AssumeRoleWithSAMLCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_AssumeRoleWithSAMLCommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_AssumeRoleWithSAMLResponse(data.AssumeRoleWithSAMLResult, context); + const response = { 
+ $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +exports.de_AssumeRoleWithSAMLCommand = de_AssumeRoleWithSAMLCommand; +const de_AssumeRoleWithSAMLCommandError = async (output, context) => { + const parsedOutput = { + ...output, + body: await parseErrorBody(output.body, context), + }; + const errorCode = loadQueryErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "ExpiredTokenException": + case "com.amazonaws.sts#ExpiredTokenException": + throw await de_ExpiredTokenExceptionRes(parsedOutput, context); + case "IDPRejectedClaim": + case "com.amazonaws.sts#IDPRejectedClaimException": + throw await de_IDPRejectedClaimExceptionRes(parsedOutput, context); + case "InvalidIdentityToken": + case "com.amazonaws.sts#InvalidIdentityTokenException": + throw await de_InvalidIdentityTokenExceptionRes(parsedOutput, context); + case "MalformedPolicyDocument": + case "com.amazonaws.sts#MalformedPolicyDocumentException": + throw await de_MalformedPolicyDocumentExceptionRes(parsedOutput, context); + case "PackedPolicyTooLarge": + case "com.amazonaws.sts#PackedPolicyTooLargeException": + throw await de_PackedPolicyTooLargeExceptionRes(parsedOutput, context); + case "RegionDisabledException": + case "com.amazonaws.sts#RegionDisabledException": + throw await de_RegionDisabledExceptionRes(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody: parsedBody.Error, + errorCode, + }); + } +}; +const de_AssumeRoleWithWebIdentityCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_AssumeRoleWithWebIdentityCommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_AssumeRoleWithWebIdentityResponse(data.AssumeRoleWithWebIdentityResult, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +exports.de_AssumeRoleWithWebIdentityCommand = de_AssumeRoleWithWebIdentityCommand; +const de_AssumeRoleWithWebIdentityCommandError = async (output, context) => { + const parsedOutput = { + ...output, + body: await parseErrorBody(output.body, context), + }; + const errorCode = loadQueryErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "ExpiredTokenException": + case "com.amazonaws.sts#ExpiredTokenException": + throw await de_ExpiredTokenExceptionRes(parsedOutput, context); + case "IDPCommunicationError": + case "com.amazonaws.sts#IDPCommunicationErrorException": + throw await de_IDPCommunicationErrorExceptionRes(parsedOutput, context); + case "IDPRejectedClaim": + case "com.amazonaws.sts#IDPRejectedClaimException": + throw await de_IDPRejectedClaimExceptionRes(parsedOutput, context); + case "InvalidIdentityToken": + case "com.amazonaws.sts#InvalidIdentityTokenException": + throw await de_InvalidIdentityTokenExceptionRes(parsedOutput, context); + case "MalformedPolicyDocument": + case "com.amazonaws.sts#MalformedPolicyDocumentException": + throw await de_MalformedPolicyDocumentExceptionRes(parsedOutput, context); + case "PackedPolicyTooLarge": + case "com.amazonaws.sts#PackedPolicyTooLargeException": + throw await de_PackedPolicyTooLargeExceptionRes(parsedOutput, context); + case "RegionDisabledException": + case "com.amazonaws.sts#RegionDisabledException": + throw await de_RegionDisabledExceptionRes(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody: 
parsedBody.Error, + errorCode, + }); + } +}; +const de_DecodeAuthorizationMessageCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_DecodeAuthorizationMessageCommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_DecodeAuthorizationMessageResponse(data.DecodeAuthorizationMessageResult, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +exports.de_DecodeAuthorizationMessageCommand = de_DecodeAuthorizationMessageCommand; +const de_DecodeAuthorizationMessageCommandError = async (output, context) => { + const parsedOutput = { + ...output, + body: await parseErrorBody(output.body, context), + }; + const errorCode = loadQueryErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "InvalidAuthorizationMessageException": + case "com.amazonaws.sts#InvalidAuthorizationMessageException": + throw await de_InvalidAuthorizationMessageExceptionRes(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody: parsedBody.Error, + errorCode, + }); + } +}; +const de_GetAccessKeyInfoCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_GetAccessKeyInfoCommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_GetAccessKeyInfoResponse(data.GetAccessKeyInfoResult, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +exports.de_GetAccessKeyInfoCommand = de_GetAccessKeyInfoCommand; +const de_GetAccessKeyInfoCommandError = async (output, context) => { + const parsedOutput = { + ...output, + body: await parseErrorBody(output.body, context), + }; + const errorCode = loadQueryErrorCode(output, parsedOutput.body); + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody: parsedBody.Error, + errorCode, + }); +}; +const de_GetCallerIdentityCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_GetCallerIdentityCommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_GetCallerIdentityResponse(data.GetCallerIdentityResult, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +exports.de_GetCallerIdentityCommand = de_GetCallerIdentityCommand; +const de_GetCallerIdentityCommandError = async (output, context) => { + const parsedOutput = { + ...output, + body: await parseErrorBody(output.body, context), + }; + const errorCode = loadQueryErrorCode(output, parsedOutput.body); + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody: parsedBody.Error, + errorCode, + }); +}; +const de_GetFederationTokenCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_GetFederationTokenCommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_GetFederationTokenResponse(data.GetFederationTokenResult, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +exports.de_GetFederationTokenCommand = de_GetFederationTokenCommand; +const de_GetFederationTokenCommandError = async (output, context) => { + const parsedOutput = { + ...output, + body: await 
parseErrorBody(output.body, context), + }; + const errorCode = loadQueryErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "MalformedPolicyDocument": + case "com.amazonaws.sts#MalformedPolicyDocumentException": + throw await de_MalformedPolicyDocumentExceptionRes(parsedOutput, context); + case "PackedPolicyTooLarge": + case "com.amazonaws.sts#PackedPolicyTooLargeException": + throw await de_PackedPolicyTooLargeExceptionRes(parsedOutput, context); + case "RegionDisabledException": + case "com.amazonaws.sts#RegionDisabledException": + throw await de_RegionDisabledExceptionRes(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody: parsedBody.Error, + errorCode, + }); + } +}; +const de_GetSessionTokenCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_GetSessionTokenCommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_GetSessionTokenResponse(data.GetSessionTokenResult, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +exports.de_GetSessionTokenCommand = de_GetSessionTokenCommand; +const de_GetSessionTokenCommandError = async (output, context) => { + const parsedOutput = { + ...output, + body: await parseErrorBody(output.body, context), + }; + const errorCode = loadQueryErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "RegionDisabledException": + case "com.amazonaws.sts#RegionDisabledException": + throw await de_RegionDisabledExceptionRes(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody: parsedBody.Error, + errorCode, + }); + } +}; +const de_ExpiredTokenExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_ExpiredTokenException(body.Error, context); + const exception = new models_0_1.ExpiredTokenException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return (0, smithy_client_1.decorateServiceException)(exception, body); +}; +const de_IDPCommunicationErrorExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_IDPCommunicationErrorException(body.Error, context); + const exception = new models_0_1.IDPCommunicationErrorException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return (0, smithy_client_1.decorateServiceException)(exception, body); +}; +const de_IDPRejectedClaimExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_IDPRejectedClaimException(body.Error, context); + const exception = new models_0_1.IDPRejectedClaimException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return (0, smithy_client_1.decorateServiceException)(exception, body); +}; +const de_InvalidAuthorizationMessageExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_InvalidAuthorizationMessageException(body.Error, context); + const exception = new models_0_1.InvalidAuthorizationMessageException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return (0, smithy_client_1.decorateServiceException)(exception, body); +}; +const de_InvalidIdentityTokenExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + 
const deserialized = de_InvalidIdentityTokenException(body.Error, context); + const exception = new models_0_1.InvalidIdentityTokenException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return (0, smithy_client_1.decorateServiceException)(exception, body); +}; +const de_MalformedPolicyDocumentExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_MalformedPolicyDocumentException(body.Error, context); + const exception = new models_0_1.MalformedPolicyDocumentException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return (0, smithy_client_1.decorateServiceException)(exception, body); +}; +const de_PackedPolicyTooLargeExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_PackedPolicyTooLargeException(body.Error, context); + const exception = new models_0_1.PackedPolicyTooLargeException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return (0, smithy_client_1.decorateServiceException)(exception, body); +}; +const de_RegionDisabledExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_RegionDisabledException(body.Error, context); + const exception = new models_0_1.RegionDisabledException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return (0, smithy_client_1.decorateServiceException)(exception, body); +}; +const se_AssumeRoleRequest = (input, context) => { + const entries = {}; + if (input.RoleArn != null) { + entries["RoleArn"] = input.RoleArn; + } + if (input.RoleSessionName != null) { + entries["RoleSessionName"] = input.RoleSessionName; + } + if (input.PolicyArns != null) { + const memberEntries = se_policyDescriptorListType(input.PolicyArns, context); + if (input.PolicyArns?.length === 0) { + entries.PolicyArns = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `PolicyArns.${key}`; + entries[loc] = value; + }); + } + if (input.Policy != null) { + entries["Policy"] = input.Policy; + } + if (input.DurationSeconds != null) { + entries["DurationSeconds"] = input.DurationSeconds; + } + if (input.Tags != null) { + const memberEntries = se_tagListType(input.Tags, context); + if (input.Tags?.length === 0) { + entries.Tags = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `Tags.${key}`; + entries[loc] = value; + }); + } + if (input.TransitiveTagKeys != null) { + const memberEntries = se_tagKeyListType(input.TransitiveTagKeys, context); + if (input.TransitiveTagKeys?.length === 0) { + entries.TransitiveTagKeys = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `TransitiveTagKeys.${key}`; + entries[loc] = value; + }); + } + if (input.ExternalId != null) { + entries["ExternalId"] = input.ExternalId; + } + if (input.SerialNumber != null) { + entries["SerialNumber"] = input.SerialNumber; + } + if (input.TokenCode != null) { + entries["TokenCode"] = input.TokenCode; + } + if (input.SourceIdentity != null) { + entries["SourceIdentity"] = input.SourceIdentity; + } + if (input.ProvidedContexts != null) { + const memberEntries = se_ProvidedContextsListType(input.ProvidedContexts, context); + if (input.ProvidedContexts?.length === 0) { + entries.ProvidedContexts = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `ProvidedContexts.${key}`; + entries[loc] = value; + }); + } + return entries; +}; 
+const se_AssumeRoleWithSAMLRequest = (input, context) => { + const entries = {}; + if (input.RoleArn != null) { + entries["RoleArn"] = input.RoleArn; + } + if (input.PrincipalArn != null) { + entries["PrincipalArn"] = input.PrincipalArn; + } + if (input.SAMLAssertion != null) { + entries["SAMLAssertion"] = input.SAMLAssertion; + } + if (input.PolicyArns != null) { + const memberEntries = se_policyDescriptorListType(input.PolicyArns, context); + if (input.PolicyArns?.length === 0) { + entries.PolicyArns = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `PolicyArns.${key}`; + entries[loc] = value; + }); + } + if (input.Policy != null) { + entries["Policy"] = input.Policy; + } + if (input.DurationSeconds != null) { + entries["DurationSeconds"] = input.DurationSeconds; + } + return entries; +}; +const se_AssumeRoleWithWebIdentityRequest = (input, context) => { + const entries = {}; + if (input.RoleArn != null) { + entries["RoleArn"] = input.RoleArn; + } + if (input.RoleSessionName != null) { + entries["RoleSessionName"] = input.RoleSessionName; + } + if (input.WebIdentityToken != null) { + entries["WebIdentityToken"] = input.WebIdentityToken; + } + if (input.ProviderId != null) { + entries["ProviderId"] = input.ProviderId; + } + if (input.PolicyArns != null) { + const memberEntries = se_policyDescriptorListType(input.PolicyArns, context); + if (input.PolicyArns?.length === 0) { + entries.PolicyArns = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `PolicyArns.${key}`; + entries[loc] = value; + }); + } + if (input.Policy != null) { + entries["Policy"] = input.Policy; + } + if (input.DurationSeconds != null) { + entries["DurationSeconds"] = input.DurationSeconds; + } + return entries; +}; +const se_DecodeAuthorizationMessageRequest = (input, context) => { + const entries = {}; + if (input.EncodedMessage != null) { + entries["EncodedMessage"] = input.EncodedMessage; + } + return entries; +}; +const se_GetAccessKeyInfoRequest = (input, context) => { + const entries = {}; + if (input.AccessKeyId != null) { + entries["AccessKeyId"] = input.AccessKeyId; + } + return entries; +}; +const se_GetCallerIdentityRequest = (input, context) => { + const entries = {}; + return entries; +}; +const se_GetFederationTokenRequest = (input, context) => { + const entries = {}; + if (input.Name != null) { + entries["Name"] = input.Name; + } + if (input.Policy != null) { + entries["Policy"] = input.Policy; + } + if (input.PolicyArns != null) { + const memberEntries = se_policyDescriptorListType(input.PolicyArns, context); + if (input.PolicyArns?.length === 0) { + entries.PolicyArns = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `PolicyArns.${key}`; + entries[loc] = value; + }); + } + if (input.DurationSeconds != null) { + entries["DurationSeconds"] = input.DurationSeconds; + } + if (input.Tags != null) { + const memberEntries = se_tagListType(input.Tags, context); + if (input.Tags?.length === 0) { + entries.Tags = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `Tags.${key}`; + entries[loc] = value; + }); + } + return entries; +}; +const se_GetSessionTokenRequest = (input, context) => { + const entries = {}; + if (input.DurationSeconds != null) { + entries["DurationSeconds"] = input.DurationSeconds; + } + if (input.SerialNumber != null) { + entries["SerialNumber"] = input.SerialNumber; + } + if (input.TokenCode != null) { + entries["TokenCode"] = input.TokenCode; + } + return 
entries; +}; +const se_policyDescriptorListType = (input, context) => { + const entries = {}; + let counter = 1; + for (const entry of input) { + if (entry === null) { + continue; + } + const memberEntries = se_PolicyDescriptorType(entry, context); + Object.entries(memberEntries).forEach(([key, value]) => { + entries[`member.${counter}.${key}`] = value; + }); + counter++; + } + return entries; +}; +const se_PolicyDescriptorType = (input, context) => { + const entries = {}; + if (input.arn != null) { + entries["arn"] = input.arn; + } + return entries; +}; +const se_ProvidedContext = (input, context) => { + const entries = {}; + if (input.ProviderArn != null) { + entries["ProviderArn"] = input.ProviderArn; + } + if (input.ContextAssertion != null) { + entries["ContextAssertion"] = input.ContextAssertion; + } + return entries; +}; +const se_ProvidedContextsListType = (input, context) => { + const entries = {}; + let counter = 1; + for (const entry of input) { + if (entry === null) { + continue; + } + const memberEntries = se_ProvidedContext(entry, context); + Object.entries(memberEntries).forEach(([key, value]) => { + entries[`member.${counter}.${key}`] = value; + }); + counter++; + } + return entries; +}; +const se_Tag = (input, context) => { + const entries = {}; + if (input.Key != null) { + entries["Key"] = input.Key; + } + if (input.Value != null) { + entries["Value"] = input.Value; + } + return entries; +}; +const se_tagKeyListType = (input, context) => { + const entries = {}; + let counter = 1; + for (const entry of input) { + if (entry === null) { + continue; + } + entries[`member.${counter}`] = entry; + counter++; + } + return entries; +}; +const se_tagListType = (input, context) => { + const entries = {}; + let counter = 1; + for (const entry of input) { + if (entry === null) { + continue; + } + const memberEntries = se_Tag(entry, context); + Object.entries(memberEntries).forEach(([key, value]) => { + entries[`member.${counter}.${key}`] = value; + }); + counter++; + } + return entries; +}; +const de_AssumedRoleUser = (output, context) => { + const contents = {}; + if (output["AssumedRoleId"] !== undefined) { + contents.AssumedRoleId = (0, smithy_client_1.expectString)(output["AssumedRoleId"]); + } + if (output["Arn"] !== undefined) { + contents.Arn = (0, smithy_client_1.expectString)(output["Arn"]); + } + return contents; +}; +const de_AssumeRoleResponse = (output, context) => { + const contents = {}; + if (output["Credentials"] !== undefined) { + contents.Credentials = de_Credentials(output["Credentials"], context); + } + if (output["AssumedRoleUser"] !== undefined) { + contents.AssumedRoleUser = de_AssumedRoleUser(output["AssumedRoleUser"], context); + } + if (output["PackedPolicySize"] !== undefined) { + contents.PackedPolicySize = (0, smithy_client_1.strictParseInt32)(output["PackedPolicySize"]); + } + if (output["SourceIdentity"] !== undefined) { + contents.SourceIdentity = (0, smithy_client_1.expectString)(output["SourceIdentity"]); + } + return contents; +}; +const de_AssumeRoleWithSAMLResponse = (output, context) => { + const contents = {}; + if (output["Credentials"] !== undefined) { + contents.Credentials = de_Credentials(output["Credentials"], context); + } + if (output["AssumedRoleUser"] !== undefined) { + contents.AssumedRoleUser = de_AssumedRoleUser(output["AssumedRoleUser"], context); + } + if (output["PackedPolicySize"] !== undefined) { + contents.PackedPolicySize = (0, smithy_client_1.strictParseInt32)(output["PackedPolicySize"]); + } + if (output["Subject"] !== 
undefined) { + contents.Subject = (0, smithy_client_1.expectString)(output["Subject"]); + } + if (output["SubjectType"] !== undefined) { + contents.SubjectType = (0, smithy_client_1.expectString)(output["SubjectType"]); + } + if (output["Issuer"] !== undefined) { + contents.Issuer = (0, smithy_client_1.expectString)(output["Issuer"]); + } + if (output["Audience"] !== undefined) { + contents.Audience = (0, smithy_client_1.expectString)(output["Audience"]); + } + if (output["NameQualifier"] !== undefined) { + contents.NameQualifier = (0, smithy_client_1.expectString)(output["NameQualifier"]); + } + if (output["SourceIdentity"] !== undefined) { + contents.SourceIdentity = (0, smithy_client_1.expectString)(output["SourceIdentity"]); + } + return contents; +}; +const de_AssumeRoleWithWebIdentityResponse = (output, context) => { + const contents = {}; + if (output["Credentials"] !== undefined) { + contents.Credentials = de_Credentials(output["Credentials"], context); + } + if (output["SubjectFromWebIdentityToken"] !== undefined) { + contents.SubjectFromWebIdentityToken = (0, smithy_client_1.expectString)(output["SubjectFromWebIdentityToken"]); + } + if (output["AssumedRoleUser"] !== undefined) { + contents.AssumedRoleUser = de_AssumedRoleUser(output["AssumedRoleUser"], context); + } + if (output["PackedPolicySize"] !== undefined) { + contents.PackedPolicySize = (0, smithy_client_1.strictParseInt32)(output["PackedPolicySize"]); + } + if (output["Provider"] !== undefined) { + contents.Provider = (0, smithy_client_1.expectString)(output["Provider"]); + } + if (output["Audience"] !== undefined) { + contents.Audience = (0, smithy_client_1.expectString)(output["Audience"]); + } + if (output["SourceIdentity"] !== undefined) { + contents.SourceIdentity = (0, smithy_client_1.expectString)(output["SourceIdentity"]); + } + return contents; +}; +const de_Credentials = (output, context) => { + const contents = {}; + if (output["AccessKeyId"] !== undefined) { + contents.AccessKeyId = (0, smithy_client_1.expectString)(output["AccessKeyId"]); + } + if (output["SecretAccessKey"] !== undefined) { + contents.SecretAccessKey = (0, smithy_client_1.expectString)(output["SecretAccessKey"]); + } + if (output["SessionToken"] !== undefined) { + contents.SessionToken = (0, smithy_client_1.expectString)(output["SessionToken"]); + } + if (output["Expiration"] !== undefined) { + contents.Expiration = (0, smithy_client_1.expectNonNull)((0, smithy_client_1.parseRfc3339DateTimeWithOffset)(output["Expiration"])); + } + return contents; +}; +const de_DecodeAuthorizationMessageResponse = (output, context) => { + const contents = {}; + if (output["DecodedMessage"] !== undefined) { + contents.DecodedMessage = (0, smithy_client_1.expectString)(output["DecodedMessage"]); + } + return contents; +}; +const de_ExpiredTokenException = (output, context) => { + const contents = {}; + if (output["message"] !== undefined) { + contents.message = (0, smithy_client_1.expectString)(output["message"]); + } + return contents; +}; +const de_FederatedUser = (output, context) => { + const contents = {}; + if (output["FederatedUserId"] !== undefined) { + contents.FederatedUserId = (0, smithy_client_1.expectString)(output["FederatedUserId"]); + } + if (output["Arn"] !== undefined) { + contents.Arn = (0, smithy_client_1.expectString)(output["Arn"]); + } + return contents; +}; +const de_GetAccessKeyInfoResponse = (output, context) => { + const contents = {}; + if (output["Account"] !== undefined) { + contents.Account = (0, 
smithy_client_1.expectString)(output["Account"]); + } + return contents; +}; +const de_GetCallerIdentityResponse = (output, context) => { + const contents = {}; + if (output["UserId"] !== undefined) { + contents.UserId = (0, smithy_client_1.expectString)(output["UserId"]); + } + if (output["Account"] !== undefined) { + contents.Account = (0, smithy_client_1.expectString)(output["Account"]); + } + if (output["Arn"] !== undefined) { + contents.Arn = (0, smithy_client_1.expectString)(output["Arn"]); + } + return contents; +}; +const de_GetFederationTokenResponse = (output, context) => { + const contents = {}; + if (output["Credentials"] !== undefined) { + contents.Credentials = de_Credentials(output["Credentials"], context); + } + if (output["FederatedUser"] !== undefined) { + contents.FederatedUser = de_FederatedUser(output["FederatedUser"], context); + } + if (output["PackedPolicySize"] !== undefined) { + contents.PackedPolicySize = (0, smithy_client_1.strictParseInt32)(output["PackedPolicySize"]); + } + return contents; +}; +const de_GetSessionTokenResponse = (output, context) => { + const contents = {}; + if (output["Credentials"] !== undefined) { + contents.Credentials = de_Credentials(output["Credentials"], context); + } + return contents; +}; +const de_IDPCommunicationErrorException = (output, context) => { + const contents = {}; + if (output["message"] !== undefined) { + contents.message = (0, smithy_client_1.expectString)(output["message"]); + } + return contents; +}; +const de_IDPRejectedClaimException = (output, context) => { + const contents = {}; + if (output["message"] !== undefined) { + contents.message = (0, smithy_client_1.expectString)(output["message"]); + } + return contents; +}; +const de_InvalidAuthorizationMessageException = (output, context) => { + const contents = {}; + if (output["message"] !== undefined) { + contents.message = (0, smithy_client_1.expectString)(output["message"]); + } + return contents; +}; +const de_InvalidIdentityTokenException = (output, context) => { + const contents = {}; + if (output["message"] !== undefined) { + contents.message = (0, smithy_client_1.expectString)(output["message"]); + } + return contents; +}; +const de_MalformedPolicyDocumentException = (output, context) => { + const contents = {}; + if (output["message"] !== undefined) { + contents.message = (0, smithy_client_1.expectString)(output["message"]); + } + return contents; +}; +const de_PackedPolicyTooLargeException = (output, context) => { + const contents = {}; + if (output["message"] !== undefined) { + contents.message = (0, smithy_client_1.expectString)(output["message"]); + } + return contents; +}; +const de_RegionDisabledException = (output, context) => { + const contents = {}; + if (output["message"] !== undefined) { + contents.message = (0, smithy_client_1.expectString)(output["message"]); + } + return contents; +}; +const deserializeMetadata = (output) => ({ + httpStatusCode: output.statusCode, + requestId: output.headers["x-amzn-requestid"] ?? output.headers["x-amzn-request-id"] ?? 
output.headers["x-amz-request-id"], + extendedRequestId: output.headers["x-amz-id-2"], + cfId: output.headers["x-amz-cf-id"], +}); +const collectBodyString = (streamBody, context) => (0, smithy_client_1.collectBody)(streamBody, context).then((body) => context.utf8Encoder(body)); +const throwDefaultError = (0, smithy_client_1.withBaseException)(STSServiceException_1.STSServiceException); +const buildHttpRpcRequest = async (context, headers, path, resolvedHostname, body) => { + const { hostname, protocol = "https", port, path: basePath } = await context.endpoint(); + const contents = { + protocol, + hostname, + port, + method: "POST", + path: basePath.endsWith("/") ? basePath.slice(0, -1) + path : basePath + path, + headers, + }; + if (resolvedHostname !== undefined) { + contents.hostname = resolvedHostname; + } + if (body !== undefined) { + contents.body = body; + } + return new protocol_http_1.HttpRequest(contents); +}; +const SHARED_HEADERS = { + "content-type": "application/x-www-form-urlencoded", +}; +const parseBody = (streamBody, context) => collectBodyString(streamBody, context).then((encoded) => { + if (encoded.length) { + const parser = new fast_xml_parser_1.XMLParser({ + attributeNamePrefix: "", + htmlEntities: true, + ignoreAttributes: false, + ignoreDeclaration: true, + parseTagValue: false, + trimValues: false, + tagValueProcessor: (_, val) => (val.trim() === "" && val.includes("\n") ? "" : undefined), + }); + parser.addEntity("#xD", "\r"); + parser.addEntity("#10", "\n"); + const parsedObj = parser.parse(encoded); + const textNodeName = "#text"; + const key = Object.keys(parsedObj)[0]; + const parsedObjToReturn = parsedObj[key]; + if (parsedObjToReturn[textNodeName]) { + parsedObjToReturn[key] = parsedObjToReturn[textNodeName]; + delete parsedObjToReturn[textNodeName]; + } + return (0, smithy_client_1.getValueFromTextNode)(parsedObjToReturn); + } + return {}; +}); +const parseErrorBody = async (errorBody, context) => { + const value = await parseBody(errorBody, context); + if (value.Error) { + value.Error.message = value.Error.message ?? 
value.Error.Message; + } + return value; +}; +const buildFormUrlencodedString = (formEntries) => Object.entries(formEntries) + .map(([key, value]) => (0, smithy_client_1.extendedEncodeURIComponent)(key) + "=" + (0, smithy_client_1.extendedEncodeURIComponent)(value)) + .join("&"); +const loadQueryErrorCode = (output, data) => { + if (data.Error?.Code !== undefined) { + return data.Error.Code; + } + if (output.statusCode == 404) { + return "NotFound"; + } +}; + + +/***/ }), + +/***/ 3405: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getRuntimeConfig = void 0; +const tslib_1 = __nccwpck_require__(4351); +const package_json_1 = tslib_1.__importDefault(__nccwpck_require__(7947)); +const defaultStsRoleAssumers_1 = __nccwpck_require__(48); +const credential_provider_node_1 = __nccwpck_require__(5531); +const util_user_agent_node_1 = __nccwpck_require__(8095); +const config_resolver_1 = __nccwpck_require__(3098); +const hash_node_1 = __nccwpck_require__(3081); +const middleware_retry_1 = __nccwpck_require__(6039); +const node_config_provider_1 = __nccwpck_require__(3461); +const node_http_handler_1 = __nccwpck_require__(258); +const util_body_length_node_1 = __nccwpck_require__(8075); +const util_retry_1 = __nccwpck_require__(4902); +const runtimeConfig_shared_1 = __nccwpck_require__(2642); +const smithy_client_1 = __nccwpck_require__(3570); +const util_defaults_mode_node_1 = __nccwpck_require__(2429); +const smithy_client_2 = __nccwpck_require__(3570); +const getRuntimeConfig = (config) => { + (0, smithy_client_2.emitWarningIfUnsupportedVersion)(process.version); + const defaultsMode = (0, util_defaults_mode_node_1.resolveDefaultsModeConfig)(config); + const defaultConfigProvider = () => defaultsMode().then(smithy_client_1.loadConfigsForDefaultMode); + const clientSharedValues = (0, runtimeConfig_shared_1.getRuntimeConfig)(config); + return { + ...clientSharedValues, + ...config, + runtime: "node", + defaultsMode, + bodyLengthChecker: config?.bodyLengthChecker ?? util_body_length_node_1.calculateBodyLength, + credentialDefaultProvider: config?.credentialDefaultProvider ?? (0, defaultStsRoleAssumers_1.decorateDefaultCredentialProvider)(credential_provider_node_1.defaultProvider), + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + (0, util_user_agent_node_1.defaultUserAgent)({ serviceId: clientSharedValues.serviceId, clientVersion: package_json_1.default.version }), + maxAttempts: config?.maxAttempts ?? (0, node_config_provider_1.loadConfig)(middleware_retry_1.NODE_MAX_ATTEMPT_CONFIG_OPTIONS), + region: config?.region ?? (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_REGION_CONFIG_OPTIONS, config_resolver_1.NODE_REGION_CONFIG_FILE_OPTIONS), + requestHandler: config?.requestHandler ?? new node_http_handler_1.NodeHttpHandler(defaultConfigProvider), + retryMode: config?.retryMode ?? + (0, node_config_provider_1.loadConfig)({ + ...middleware_retry_1.NODE_RETRY_MODE_CONFIG_OPTIONS, + default: async () => (await defaultConfigProvider()).retryMode || util_retry_1.DEFAULT_RETRY_MODE, + }), + sha256: config?.sha256 ?? hash_node_1.Hash.bind(null, "sha256"), + streamCollector: config?.streamCollector ?? node_http_handler_1.streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS), + useFipsEndpoint: config?.useFipsEndpoint ?? 
(0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS), + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; + + +/***/ }), + +/***/ 2642: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getRuntimeConfig = void 0; +const smithy_client_1 = __nccwpck_require__(3570); +const url_parser_1 = __nccwpck_require__(4681); +const util_base64_1 = __nccwpck_require__(5600); +const util_utf8_1 = __nccwpck_require__(1895); +const endpointResolver_1 = __nccwpck_require__(1203); +const getRuntimeConfig = (config) => ({ + apiVersion: "2011-06-15", + base64Decoder: config?.base64Decoder ?? util_base64_1.fromBase64, + base64Encoder: config?.base64Encoder ?? util_base64_1.toBase64, + disableHostPrefix: config?.disableHostPrefix ?? false, + endpointProvider: config?.endpointProvider ?? endpointResolver_1.defaultEndpointResolver, + logger: config?.logger ?? new smithy_client_1.NoOpLogger(), + serviceId: config?.serviceId ?? "STS", + urlParser: config?.urlParser ?? url_parser_1.parseUrl, + utf8Decoder: config?.utf8Decoder ?? util_utf8_1.fromUtf8, + utf8Encoder: config?.utf8Encoder ?? util_utf8_1.toUtf8, +}); +exports.getRuntimeConfig = getRuntimeConfig; + + +/***/ }), + +/***/ 255: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.fromEnv = exports.ENV_EXPIRATION = exports.ENV_SESSION = exports.ENV_SECRET = exports.ENV_KEY = void 0; +const property_provider_1 = __nccwpck_require__(9721); +exports.ENV_KEY = "AWS_ACCESS_KEY_ID"; +exports.ENV_SECRET = "AWS_SECRET_ACCESS_KEY"; +exports.ENV_SESSION = "AWS_SESSION_TOKEN"; +exports.ENV_EXPIRATION = "AWS_CREDENTIAL_EXPIRATION"; +const fromEnv = () => async () => { + const accessKeyId = process.env[exports.ENV_KEY]; + const secretAccessKey = process.env[exports.ENV_SECRET]; + const sessionToken = process.env[exports.ENV_SESSION]; + const expiry = process.env[exports.ENV_EXPIRATION]; + if (accessKeyId && secretAccessKey) { + return { + accessKeyId, + secretAccessKey, + ...(sessionToken && { sessionToken }), + ...(expiry && { expiration: new Date(expiry) }), + }; + } + throw new property_provider_1.CredentialsProviderError("Unable to find environment variable credentials."); +}; +exports.fromEnv = fromEnv; + + +/***/ }), + +/***/ 5972: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(255), exports); + + +/***/ }), + +/***/ 5442: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.fromIni = void 0; +const shared_ini_file_loader_1 = __nccwpck_require__(3507); +const resolveProfileData_1 = __nccwpck_require__(5653); +const fromIni = (init = {}) => async () => { + const profiles = await (0, shared_ini_file_loader_1.parseKnownFiles)(init); + return (0, resolveProfileData_1.resolveProfileData)((0, shared_ini_file_loader_1.getProfileName)(init), profiles, init); +}; +exports.fromIni = fromIni; + + +/***/ }), + +/***/ 4203: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); 
+tslib_1.__exportStar(__nccwpck_require__(5442), exports); + + +/***/ }), + +/***/ 853: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveAssumeRoleCredentials = exports.isAssumeRoleProfile = void 0; +const property_provider_1 = __nccwpck_require__(9721); +const shared_ini_file_loader_1 = __nccwpck_require__(3507); +const resolveCredentialSource_1 = __nccwpck_require__(2458); +const resolveProfileData_1 = __nccwpck_require__(5653); +const isAssumeRoleProfile = (arg) => Boolean(arg) && + typeof arg === "object" && + typeof arg.role_arn === "string" && + ["undefined", "string"].indexOf(typeof arg.role_session_name) > -1 && + ["undefined", "string"].indexOf(typeof arg.external_id) > -1 && + ["undefined", "string"].indexOf(typeof arg.mfa_serial) > -1 && + (isAssumeRoleWithSourceProfile(arg) || isAssumeRoleWithProviderProfile(arg)); +exports.isAssumeRoleProfile = isAssumeRoleProfile; +const isAssumeRoleWithSourceProfile = (arg) => typeof arg.source_profile === "string" && typeof arg.credential_source === "undefined"; +const isAssumeRoleWithProviderProfile = (arg) => typeof arg.credential_source === "string" && typeof arg.source_profile === "undefined"; +const resolveAssumeRoleCredentials = async (profileName, profiles, options, visitedProfiles = {}) => { + const data = profiles[profileName]; + if (!options.roleAssumer) { + throw new property_provider_1.CredentialsProviderError(`Profile ${profileName} requires a role to be assumed, but no role assumption callback was provided.`, false); + } + const { source_profile } = data; + if (source_profile && source_profile in visitedProfiles) { + throw new property_provider_1.CredentialsProviderError(`Detected a cycle attempting to resolve credentials for profile` + + ` ${(0, shared_ini_file_loader_1.getProfileName)(options)}. Profiles visited: ` + + Object.keys(visitedProfiles).join(", "), false); + } + const sourceCredsProvider = source_profile + ? 
(0, resolveProfileData_1.resolveProfileData)(source_profile, profiles, options, { + ...visitedProfiles, + [source_profile]: true, + }) + : (0, resolveCredentialSource_1.resolveCredentialSource)(data.credential_source, profileName)(); + const params = { + RoleArn: data.role_arn, + RoleSessionName: data.role_session_name || `aws-sdk-js-${Date.now()}`, + ExternalId: data.external_id, + }; + const { mfa_serial } = data; + if (mfa_serial) { + if (!options.mfaCodeProvider) { + throw new property_provider_1.CredentialsProviderError(`Profile ${profileName} requires multi-factor authentication, but no MFA code callback was provided.`, false); + } + params.SerialNumber = mfa_serial; + params.TokenCode = await options.mfaCodeProvider(mfa_serial); + } + const sourceCreds = await sourceCredsProvider; + return options.roleAssumer(sourceCreds, params); +}; +exports.resolveAssumeRoleCredentials = resolveAssumeRoleCredentials; + + +/***/ }), + +/***/ 2458: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveCredentialSource = void 0; +const credential_provider_env_1 = __nccwpck_require__(5972); +const credential_provider_imds_1 = __nccwpck_require__(7477); +const property_provider_1 = __nccwpck_require__(9721); +const resolveCredentialSource = (credentialSource, profileName) => { + const sourceProvidersMap = { + EcsContainer: credential_provider_imds_1.fromContainerMetadata, + Ec2InstanceMetadata: credential_provider_imds_1.fromInstanceMetadata, + Environment: credential_provider_env_1.fromEnv, + }; + if (credentialSource in sourceProvidersMap) { + return sourceProvidersMap[credentialSource](); + } + else { + throw new property_provider_1.CredentialsProviderError(`Unsupported credential source in profile ${profileName}. 
Got ${credentialSource}, ` + + `expected EcsContainer or Ec2InstanceMetadata or Environment.`); + } +}; +exports.resolveCredentialSource = resolveCredentialSource; + + +/***/ }), + +/***/ 9993: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveProcessCredentials = exports.isProcessProfile = void 0; +const credential_provider_process_1 = __nccwpck_require__(9969); +const isProcessProfile = (arg) => Boolean(arg) && typeof arg === "object" && typeof arg.credential_process === "string"; +exports.isProcessProfile = isProcessProfile; +const resolveProcessCredentials = async (options, profile) => (0, credential_provider_process_1.fromProcess)({ + ...options, + profile, +})(); +exports.resolveProcessCredentials = resolveProcessCredentials; + + +/***/ }), + +/***/ 5653: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveProfileData = void 0; +const property_provider_1 = __nccwpck_require__(9721); +const resolveAssumeRoleCredentials_1 = __nccwpck_require__(853); +const resolveProcessCredentials_1 = __nccwpck_require__(9993); +const resolveSsoCredentials_1 = __nccwpck_require__(9867); +const resolveStaticCredentials_1 = __nccwpck_require__(3071); +const resolveWebIdentityCredentials_1 = __nccwpck_require__(8342); +const resolveProfileData = async (profileName, profiles, options, visitedProfiles = {}) => { + const data = profiles[profileName]; + if (Object.keys(visitedProfiles).length > 0 && (0, resolveStaticCredentials_1.isStaticCredsProfile)(data)) { + return (0, resolveStaticCredentials_1.resolveStaticCredentials)(data); + } + if ((0, resolveAssumeRoleCredentials_1.isAssumeRoleProfile)(data)) { + return (0, resolveAssumeRoleCredentials_1.resolveAssumeRoleCredentials)(profileName, profiles, options, visitedProfiles); + } + if ((0, resolveStaticCredentials_1.isStaticCredsProfile)(data)) { + return (0, resolveStaticCredentials_1.resolveStaticCredentials)(data); + } + if ((0, resolveWebIdentityCredentials_1.isWebIdentityProfile)(data)) { + return (0, resolveWebIdentityCredentials_1.resolveWebIdentityCredentials)(data, options); + } + if ((0, resolveProcessCredentials_1.isProcessProfile)(data)) { + return (0, resolveProcessCredentials_1.resolveProcessCredentials)(options, profileName); + } + if ((0, resolveSsoCredentials_1.isSsoProfile)(data)) { + return (0, resolveSsoCredentials_1.resolveSsoCredentials)(data); + } + throw new property_provider_1.CredentialsProviderError(`Profile ${profileName} could not be found or parsed in shared credentials file.`); +}; +exports.resolveProfileData = resolveProfileData; + + +/***/ }), + +/***/ 9867: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveSsoCredentials = exports.isSsoProfile = void 0; +const credential_provider_sso_1 = __nccwpck_require__(6414); +var credential_provider_sso_2 = __nccwpck_require__(6414); +Object.defineProperty(exports, "isSsoProfile", ({ enumerable: true, get: function () { return credential_provider_sso_2.isSsoProfile; } })); +const resolveSsoCredentials = (data) => { + const { sso_start_url, sso_account_id, sso_session, sso_region, sso_role_name } = (0, credential_provider_sso_1.validateSsoProfile)(data); + return (0, credential_provider_sso_1.fromSSO)({ + ssoStartUrl: sso_start_url, + 
ssoAccountId: sso_account_id, + ssoSession: sso_session, + ssoRegion: sso_region, + ssoRoleName: sso_role_name, + })(); +}; +exports.resolveSsoCredentials = resolveSsoCredentials; + + +/***/ }), + +/***/ 3071: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveStaticCredentials = exports.isStaticCredsProfile = void 0; +const isStaticCredsProfile = (arg) => Boolean(arg) && + typeof arg === "object" && + typeof arg.aws_access_key_id === "string" && + typeof arg.aws_secret_access_key === "string" && + ["undefined", "string"].indexOf(typeof arg.aws_session_token) > -1; +exports.isStaticCredsProfile = isStaticCredsProfile; +const resolveStaticCredentials = (profile) => Promise.resolve({ + accessKeyId: profile.aws_access_key_id, + secretAccessKey: profile.aws_secret_access_key, + sessionToken: profile.aws_session_token, +}); +exports.resolveStaticCredentials = resolveStaticCredentials; + + +/***/ }), + +/***/ 8342: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveWebIdentityCredentials = exports.isWebIdentityProfile = void 0; +const credential_provider_web_identity_1 = __nccwpck_require__(5646); +const isWebIdentityProfile = (arg) => Boolean(arg) && + typeof arg === "object" && + typeof arg.web_identity_token_file === "string" && + typeof arg.role_arn === "string" && + ["undefined", "string"].indexOf(typeof arg.role_session_name) > -1; +exports.isWebIdentityProfile = isWebIdentityProfile; +const resolveWebIdentityCredentials = async (profile, options) => (0, credential_provider_web_identity_1.fromTokenFile)({ + webIdentityTokenFile: profile.web_identity_token_file, + roleArn: profile.role_arn, + roleSessionName: profile.role_session_name, + roleAssumerWithWebIdentity: options.roleAssumerWithWebIdentity, +})(); +exports.resolveWebIdentityCredentials = resolveWebIdentityCredentials; + + +/***/ }), + +/***/ 5560: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.defaultProvider = void 0; +const credential_provider_env_1 = __nccwpck_require__(5972); +const credential_provider_ini_1 = __nccwpck_require__(4203); +const credential_provider_process_1 = __nccwpck_require__(9969); +const credential_provider_sso_1 = __nccwpck_require__(6414); +const credential_provider_web_identity_1 = __nccwpck_require__(5646); +const property_provider_1 = __nccwpck_require__(9721); +const shared_ini_file_loader_1 = __nccwpck_require__(3507); +const remoteProvider_1 = __nccwpck_require__(626); +const defaultProvider = (init = {}) => (0, property_provider_1.memoize)((0, property_provider_1.chain)(...(init.profile || process.env[shared_ini_file_loader_1.ENV_PROFILE] ? 
[] : [(0, credential_provider_env_1.fromEnv)()]), (0, credential_provider_sso_1.fromSSO)(init), (0, credential_provider_ini_1.fromIni)(init), (0, credential_provider_process_1.fromProcess)(init), (0, credential_provider_web_identity_1.fromTokenFile)(init), (0, remoteProvider_1.remoteProvider)(init), async () => { + throw new property_provider_1.CredentialsProviderError("Could not load credentials from any providers", false); +}), (credentials) => credentials.expiration !== undefined && credentials.expiration.getTime() - Date.now() < 300000, (credentials) => credentials.expiration !== undefined); +exports.defaultProvider = defaultProvider; + + +/***/ }), + +/***/ 5531: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(5560), exports); + + +/***/ }), + +/***/ 626: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.remoteProvider = exports.ENV_IMDS_DISABLED = void 0; +const credential_provider_imds_1 = __nccwpck_require__(7477); +const property_provider_1 = __nccwpck_require__(9721); +exports.ENV_IMDS_DISABLED = "AWS_EC2_METADATA_DISABLED"; +const remoteProvider = (init) => { + if (process.env[credential_provider_imds_1.ENV_CMDS_RELATIVE_URI] || process.env[credential_provider_imds_1.ENV_CMDS_FULL_URI]) { + return (0, credential_provider_imds_1.fromContainerMetadata)(init); + } + if (process.env[exports.ENV_IMDS_DISABLED]) { + return async () => { + throw new property_provider_1.CredentialsProviderError("EC2 Instance Metadata Service access disabled"); + }; + } + return (0, credential_provider_imds_1.fromInstanceMetadata)(init); +}; +exports.remoteProvider = remoteProvider; + + +/***/ }), + +/***/ 2650: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.fromProcess = void 0; +const shared_ini_file_loader_1 = __nccwpck_require__(3507); +const resolveProcessCredentials_1 = __nccwpck_require__(4926); +const fromProcess = (init = {}) => async () => { + const profiles = await (0, shared_ini_file_loader_1.parseKnownFiles)(init); + return (0, resolveProcessCredentials_1.resolveProcessCredentials)((0, shared_ini_file_loader_1.getProfileName)(init), profiles); +}; +exports.fromProcess = fromProcess; + + +/***/ }), + +/***/ 1104: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getValidatedProcessCredentials = void 0; +const getValidatedProcessCredentials = (profileName, data) => { + if (data.Version !== 1) { + throw Error(`Profile ${profileName} credential_process did not return Version 1.`); + } + if (data.AccessKeyId === undefined || data.SecretAccessKey === undefined) { + throw Error(`Profile ${profileName} credential_process returned invalid credentials.`); + } + if (data.Expiration) { + const currentTime = new Date(); + const expireTime = new Date(data.Expiration); + if (expireTime < currentTime) { + throw Error(`Profile ${profileName} credential_process returned expired credentials.`); + } + } + return { + accessKeyId: data.AccessKeyId, + secretAccessKey: data.SecretAccessKey, + ...(data.SessionToken && { sessionToken: data.SessionToken }), + ...(data.Expiration && { expiration: new 
Date(data.Expiration) }), + }; +}; +exports.getValidatedProcessCredentials = getValidatedProcessCredentials; + + +/***/ }), + +/***/ 9969: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(2650), exports); + + +/***/ }), + +/***/ 4926: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveProcessCredentials = void 0; +const property_provider_1 = __nccwpck_require__(9721); +const child_process_1 = __nccwpck_require__(2081); +const util_1 = __nccwpck_require__(3837); +const getValidatedProcessCredentials_1 = __nccwpck_require__(1104); +const resolveProcessCredentials = async (profileName, profiles) => { + const profile = profiles[profileName]; + if (profiles[profileName]) { + const credentialProcess = profile["credential_process"]; + if (credentialProcess !== undefined) { + const execPromise = (0, util_1.promisify)(child_process_1.exec); + try { + const { stdout } = await execPromise(credentialProcess); + let data; + try { + data = JSON.parse(stdout.trim()); + } + catch (_a) { + throw Error(`Profile ${profileName} credential_process returned invalid JSON.`); + } + return (0, getValidatedProcessCredentials_1.getValidatedProcessCredentials)(profileName, data); + } + catch (error) { + throw new property_provider_1.CredentialsProviderError(error.message); + } + } + else { + throw new property_provider_1.CredentialsProviderError(`Profile ${profileName} did not contain credential_process.`); + } + } + else { + throw new property_provider_1.CredentialsProviderError(`Profile ${profileName} could not be found in shared credentials file.`); + } +}; +exports.resolveProcessCredentials = resolveProcessCredentials; + + +/***/ }), + +/***/ 5184: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.fromSSO = void 0; +const property_provider_1 = __nccwpck_require__(9721); +const shared_ini_file_loader_1 = __nccwpck_require__(3507); +const isSsoProfile_1 = __nccwpck_require__(2572); +const resolveSSOCredentials_1 = __nccwpck_require__(4729); +const validateSsoProfile_1 = __nccwpck_require__(8098); +const fromSSO = (init = {}) => async () => { + const { ssoStartUrl, ssoAccountId, ssoRegion, ssoRoleName, ssoClient, ssoSession } = init; + const profileName = (0, shared_ini_file_loader_1.getProfileName)(init); + if (!ssoStartUrl && !ssoAccountId && !ssoRegion && !ssoRoleName && !ssoSession) { + const profiles = await (0, shared_ini_file_loader_1.parseKnownFiles)(init); + const profile = profiles[profileName]; + if (!profile) { + throw new property_provider_1.CredentialsProviderError(`Profile ${profileName} was not found.`); + } + if (!(0, isSsoProfile_1.isSsoProfile)(profile)) { + throw new property_provider_1.CredentialsProviderError(`Profile ${profileName} is not configured with SSO credentials.`); + } + if (profile === null || profile === void 0 ? 
void 0 : profile.sso_session) { + const ssoSessions = await (0, shared_ini_file_loader_1.loadSsoSessionData)(init); + const session = ssoSessions[profile.sso_session]; + const conflictMsg = ` configurations in profile ${profileName} and sso-session ${profile.sso_session}`; + if (ssoRegion && ssoRegion !== session.sso_region) { + throw new property_provider_1.CredentialsProviderError(`Conflicting SSO region` + conflictMsg, false); + } + if (ssoStartUrl && ssoStartUrl !== session.sso_start_url) { + throw new property_provider_1.CredentialsProviderError(`Conflicting SSO start_url` + conflictMsg, false); + } + profile.sso_region = session.sso_region; + profile.sso_start_url = session.sso_start_url; + } + const { sso_start_url, sso_account_id, sso_region, sso_role_name, sso_session } = (0, validateSsoProfile_1.validateSsoProfile)(profile); + return (0, resolveSSOCredentials_1.resolveSSOCredentials)({ + ssoStartUrl: sso_start_url, + ssoSession: sso_session, + ssoAccountId: sso_account_id, + ssoRegion: sso_region, + ssoRoleName: sso_role_name, + ssoClient: ssoClient, + profile: profileName, + }); + } + else if (!ssoStartUrl || !ssoAccountId || !ssoRegion || !ssoRoleName) { + throw new property_provider_1.CredentialsProviderError("Incomplete configuration. The fromSSO() argument hash must include " + + '"ssoStartUrl", "ssoAccountId", "ssoRegion", "ssoRoleName"'); + } + else { + return (0, resolveSSOCredentials_1.resolveSSOCredentials)({ + ssoStartUrl, + ssoSession, + ssoAccountId, + ssoRegion, + ssoRoleName, + ssoClient, + profile: profileName, + }); + } +}; +exports.fromSSO = fromSSO; + + +/***/ }), + +/***/ 6414: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(5184), exports); +tslib_1.__exportStar(__nccwpck_require__(2572), exports); +tslib_1.__exportStar(__nccwpck_require__(6623), exports); +tslib_1.__exportStar(__nccwpck_require__(8098), exports); + + +/***/ }), + +/***/ 2572: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isSsoProfile = void 0; +const isSsoProfile = (arg) => arg && + (typeof arg.sso_start_url === "string" || + typeof arg.sso_account_id === "string" || + typeof arg.sso_session === "string" || + typeof arg.sso_region === "string" || + typeof arg.sso_role_name === "string"); +exports.isSsoProfile = isSsoProfile; + + +/***/ }), + +/***/ 4729: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveSSOCredentials = void 0; +const client_sso_1 = __nccwpck_require__(8197); +const token_providers_1 = __nccwpck_require__(2843); +const property_provider_1 = __nccwpck_require__(9721); +const shared_ini_file_loader_1 = __nccwpck_require__(3507); +const EXPIRE_WINDOW_MS = 15 * 60 * 1000; +const SHOULD_FAIL_CREDENTIAL_CHAIN = false; +const resolveSSOCredentials = async ({ ssoStartUrl, ssoSession, ssoAccountId, ssoRegion, ssoRoleName, ssoClient, profile, }) => { + let token; + const refreshMessage = `To refresh this SSO session run aws sso login with the corresponding profile.`; + if (ssoSession) { + try { + const _token = await (0, token_providers_1.fromSso)({ profile })(); + token = { + accessToken: _token.token, + expiresAt: new Date(_token.expiration).toISOString(), + }; + } + catch (e) { + 
throw new property_provider_1.CredentialsProviderError(e.message, SHOULD_FAIL_CREDENTIAL_CHAIN); + } + } + else { + try { + token = await (0, shared_ini_file_loader_1.getSSOTokenFromFile)(ssoStartUrl); + } + catch (e) { + throw new property_provider_1.CredentialsProviderError(`The SSO session associated with this profile is invalid. ${refreshMessage}`, SHOULD_FAIL_CREDENTIAL_CHAIN); + } + } + if (new Date(token.expiresAt).getTime() - Date.now() <= EXPIRE_WINDOW_MS) { + throw new property_provider_1.CredentialsProviderError(`The SSO session associated with this profile has expired. ${refreshMessage}`, SHOULD_FAIL_CREDENTIAL_CHAIN); + } + const { accessToken } = token; + const sso = ssoClient || new client_sso_1.SSOClient({ region: ssoRegion }); + let ssoResp; + try { + ssoResp = await sso.send(new client_sso_1.GetRoleCredentialsCommand({ + accountId: ssoAccountId, + roleName: ssoRoleName, + accessToken, + })); + } + catch (e) { + throw property_provider_1.CredentialsProviderError.from(e, SHOULD_FAIL_CREDENTIAL_CHAIN); + } + const { roleCredentials: { accessKeyId, secretAccessKey, sessionToken, expiration } = {} } = ssoResp; + if (!accessKeyId || !secretAccessKey || !sessionToken || !expiration) { + throw new property_provider_1.CredentialsProviderError("SSO returns an invalid temporary credential.", SHOULD_FAIL_CREDENTIAL_CHAIN); + } + return { accessKeyId, secretAccessKey, sessionToken, expiration: new Date(expiration) }; +}; +exports.resolveSSOCredentials = resolveSSOCredentials; + + +/***/ }), + +/***/ 6623: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 8098: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.validateSsoProfile = void 0; +const property_provider_1 = __nccwpck_require__(9721); +const validateSsoProfile = (profile) => { + const { sso_start_url, sso_account_id, sso_region, sso_role_name } = profile; + if (!sso_start_url || !sso_account_id || !sso_region || !sso_role_name) { + throw new property_provider_1.CredentialsProviderError(`Profile is configured with invalid SSO credentials. Required parameters "sso_account_id", ` + + `"sso_region", "sso_role_name", "sso_start_url". Got ${Object.keys(profile).join(", ")}\nReference: https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-sso.html`, false); + } + return profile; +}; +exports.validateSsoProfile = validateSsoProfile; + + +/***/ }), + +/***/ 5614: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.fromTokenFile = void 0; +const property_provider_1 = __nccwpck_require__(9721); +const fs_1 = __nccwpck_require__(7147); +const fromWebToken_1 = __nccwpck_require__(7905); +const ENV_TOKEN_FILE = "AWS_WEB_IDENTITY_TOKEN_FILE"; +const ENV_ROLE_ARN = "AWS_ROLE_ARN"; +const ENV_ROLE_SESSION_NAME = "AWS_ROLE_SESSION_NAME"; +const fromTokenFile = (init = {}) => async () => { + var _a, _b, _c; + const webIdentityTokenFile = (_a = init === null || init === void 0 ? void 0 : init.webIdentityTokenFile) !== null && _a !== void 0 ? _a : process.env[ENV_TOKEN_FILE]; + const roleArn = (_b = init === null || init === void 0 ? void 0 : init.roleArn) !== null && _b !== void 0 ? _b : process.env[ENV_ROLE_ARN]; + const roleSessionName = (_c = init === null || init === void 0 ? 
void 0 : init.roleSessionName) !== null && _c !== void 0 ? _c : process.env[ENV_ROLE_SESSION_NAME]; + if (!webIdentityTokenFile || !roleArn) { + throw new property_provider_1.CredentialsProviderError("Web identity configuration not specified"); + } + return (0, fromWebToken_1.fromWebToken)({ + ...init, + webIdentityToken: (0, fs_1.readFileSync)(webIdentityTokenFile, { encoding: "ascii" }), + roleArn, + roleSessionName, + })(); +}; +exports.fromTokenFile = fromTokenFile; + + +/***/ }), + +/***/ 7905: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.fromWebToken = void 0; +const property_provider_1 = __nccwpck_require__(9721); +const fromWebToken = (init) => () => { + const { roleArn, roleSessionName, webIdentityToken, providerId, policyArns, policy, durationSeconds, roleAssumerWithWebIdentity, } = init; + if (!roleAssumerWithWebIdentity) { + throw new property_provider_1.CredentialsProviderError(`Role Arn '${roleArn}' needs to be assumed with web identity,` + + ` but no role assumption callback was provided.`, false); + } + return roleAssumerWithWebIdentity({ + RoleArn: roleArn, + RoleSessionName: roleSessionName !== null && roleSessionName !== void 0 ? roleSessionName : `aws-sdk-js-session-${Date.now()}`, + WebIdentityToken: webIdentityToken, + ProviderId: providerId, + PolicyArns: policyArns, + Policy: policy, + DurationSeconds: durationSeconds, + }); +}; +exports.fromWebToken = fromWebToken; + + +/***/ }), + +/***/ 5646: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(5614), exports); +tslib_1.__exportStar(__nccwpck_require__(7905), exports); + + +/***/ }), + +/***/ 2545: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getHostHeaderPlugin = exports.hostHeaderMiddlewareOptions = exports.hostHeaderMiddleware = exports.resolveHostHeaderConfig = void 0; +const protocol_http_1 = __nccwpck_require__(4418); +function resolveHostHeaderConfig(input) { + return input; +} +exports.resolveHostHeaderConfig = resolveHostHeaderConfig; +const hostHeaderMiddleware = (options) => (next) => async (args) => { + if (!protocol_http_1.HttpRequest.isInstance(args.request)) + return next(args); + const { request } = args; + const { handlerProtocol = "" } = options.requestHandler.metadata || {}; + if (handlerProtocol.indexOf("h2") >= 0 && !request.headers[":authority"]) { + delete request.headers["host"]; + request.headers[":authority"] = ""; + } + else if (!request.headers["host"]) { + let host = request.hostname; + if (request.port != null) + host += `:${request.port}`; + request.headers["host"] = host; + } + return next(args); +}; +exports.hostHeaderMiddleware = hostHeaderMiddleware; +exports.hostHeaderMiddlewareOptions = { + name: "hostHeaderMiddleware", + step: "build", + priority: "low", + tags: ["HOST"], + override: true, +}; +const getHostHeaderPlugin = (options) => ({ + applyToStack: (clientStack) => { + clientStack.add((0, exports.hostHeaderMiddleware)(options), exports.hostHeaderMiddlewareOptions); + }, +}); +exports.getHostHeaderPlugin = getHostHeaderPlugin; + + +/***/ }), + +/***/ 14: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + 
+Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(9754), exports); + + +/***/ }), + +/***/ 9754: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getLoggerPlugin = exports.loggerMiddlewareOptions = exports.loggerMiddleware = void 0; +const loggerMiddleware = () => (next, context) => async (args) => { + var _a, _b; + try { + const response = await next(args); + const { clientName, commandName, logger, dynamoDbDocumentClientOptions = {} } = context; + const { overrideInputFilterSensitiveLog, overrideOutputFilterSensitiveLog } = dynamoDbDocumentClientOptions; + const inputFilterSensitiveLog = overrideInputFilterSensitiveLog !== null && overrideInputFilterSensitiveLog !== void 0 ? overrideInputFilterSensitiveLog : context.inputFilterSensitiveLog; + const outputFilterSensitiveLog = overrideOutputFilterSensitiveLog !== null && overrideOutputFilterSensitiveLog !== void 0 ? overrideOutputFilterSensitiveLog : context.outputFilterSensitiveLog; + const { $metadata, ...outputWithoutMetadata } = response.output; + (_a = logger === null || logger === void 0 ? void 0 : logger.info) === null || _a === void 0 ? void 0 : _a.call(logger, { + clientName, + commandName, + input: inputFilterSensitiveLog(args.input), + output: outputFilterSensitiveLog(outputWithoutMetadata), + metadata: $metadata, + }); + return response; + } + catch (error) { + const { clientName, commandName, logger, dynamoDbDocumentClientOptions = {} } = context; + const { overrideInputFilterSensitiveLog } = dynamoDbDocumentClientOptions; + const inputFilterSensitiveLog = overrideInputFilterSensitiveLog !== null && overrideInputFilterSensitiveLog !== void 0 ? overrideInputFilterSensitiveLog : context.inputFilterSensitiveLog; + (_b = logger === null || logger === void 0 ? void 0 : logger.error) === null || _b === void 0 ? 
void 0 : _b.call(logger, { + clientName, + commandName, + input: inputFilterSensitiveLog(args.input), + error, + metadata: error.$metadata, + }); + throw error; + } +}; +exports.loggerMiddleware = loggerMiddleware; +exports.loggerMiddlewareOptions = { + name: "loggerMiddleware", + tags: ["LOGGER"], + step: "initialize", + override: true, +}; +const getLoggerPlugin = (options) => ({ + applyToStack: (clientStack) => { + clientStack.add((0, exports.loggerMiddleware)(), exports.loggerMiddlewareOptions); + }, +}); +exports.getLoggerPlugin = getLoggerPlugin; + + +/***/ }), + +/***/ 5525: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getRecursionDetectionPlugin = exports.addRecursionDetectionMiddlewareOptions = exports.recursionDetectionMiddleware = void 0; +const protocol_http_1 = __nccwpck_require__(4418); +const TRACE_ID_HEADER_NAME = "X-Amzn-Trace-Id"; +const ENV_LAMBDA_FUNCTION_NAME = "AWS_LAMBDA_FUNCTION_NAME"; +const ENV_TRACE_ID = "_X_AMZN_TRACE_ID"; +const recursionDetectionMiddleware = (options) => (next) => async (args) => { + const { request } = args; + if (!protocol_http_1.HttpRequest.isInstance(request) || + options.runtime !== "node" || + request.headers.hasOwnProperty(TRACE_ID_HEADER_NAME)) { + return next(args); + } + const functionName = process.env[ENV_LAMBDA_FUNCTION_NAME]; + const traceId = process.env[ENV_TRACE_ID]; + const nonEmptyString = (str) => typeof str === "string" && str.length > 0; + if (nonEmptyString(functionName) && nonEmptyString(traceId)) { + request.headers[TRACE_ID_HEADER_NAME] = traceId; + } + return next({ + ...args, + request, + }); +}; +exports.recursionDetectionMiddleware = recursionDetectionMiddleware; +exports.addRecursionDetectionMiddlewareOptions = { + step: "build", + tags: ["RECURSION_DETECTION"], + name: "recursionDetectionMiddleware", + override: true, + priority: "low", +}; +const getRecursionDetectionPlugin = (options) => ({ + applyToStack: (clientStack) => { + clientStack.add((0, exports.recursionDetectionMiddleware)(options), exports.addRecursionDetectionMiddlewareOptions); + }, +}); +exports.getRecursionDetectionPlugin = getRecursionDetectionPlugin; + + +/***/ }), + +/***/ 5959: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveStsAuthConfig = void 0; +const middleware_signing_1 = __nccwpck_require__(4935); +const resolveStsAuthConfig = (input, { stsClientCtor }) => (0, middleware_signing_1.resolveAwsAuthConfig)({ + ...input, + stsClientCtor, +}); +exports.resolveStsAuthConfig = resolveStsAuthConfig; + + +/***/ }), + +/***/ 4193: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveSigV4AuthConfig = exports.resolveAwsAuthConfig = void 0; +const property_provider_1 = __nccwpck_require__(9721); +const signature_v4_1 = __nccwpck_require__(1528); +const util_middleware_1 = __nccwpck_require__(2390); +const CREDENTIAL_EXPIRE_WINDOW = 300000; +const resolveAwsAuthConfig = (input) => { + const normalizedCreds = input.credentials + ? 
normalizeCredentialProvider(input.credentials) + : input.credentialDefaultProvider(input); + const { signingEscapePath = true, systemClockOffset = input.systemClockOffset || 0, sha256 } = input; + let signer; + if (input.signer) { + signer = (0, util_middleware_1.normalizeProvider)(input.signer); + } + else if (input.regionInfoProvider) { + signer = () => (0, util_middleware_1.normalizeProvider)(input.region)() + .then(async (region) => [ + (await input.regionInfoProvider(region, { + useFipsEndpoint: await input.useFipsEndpoint(), + useDualstackEndpoint: await input.useDualstackEndpoint(), + })) || {}, + region, + ]) + .then(([regionInfo, region]) => { + const { signingRegion, signingService } = regionInfo; + input.signingRegion = input.signingRegion || signingRegion || region; + input.signingName = input.signingName || signingService || input.serviceId; + const params = { + ...input, + credentials: normalizedCreds, + region: input.signingRegion, + service: input.signingName, + sha256, + uriEscapePath: signingEscapePath, + }; + const SignerCtor = input.signerConstructor || signature_v4_1.SignatureV4; + return new SignerCtor(params); + }); + } + else { + signer = async (authScheme) => { + authScheme = Object.assign({}, { + name: "sigv4", + signingName: input.signingName || input.defaultSigningName, + signingRegion: await (0, util_middleware_1.normalizeProvider)(input.region)(), + properties: {}, + }, authScheme); + const signingRegion = authScheme.signingRegion; + const signingService = authScheme.signingName; + input.signingRegion = input.signingRegion || signingRegion; + input.signingName = input.signingName || signingService || input.serviceId; + const params = { + ...input, + credentials: normalizedCreds, + region: input.signingRegion, + service: input.signingName, + sha256, + uriEscapePath: signingEscapePath, + }; + const SignerCtor = input.signerConstructor || signature_v4_1.SignatureV4; + return new SignerCtor(params); + }; + } + return { + ...input, + systemClockOffset, + signingEscapePath, + credentials: normalizedCreds, + signer, + }; +}; +exports.resolveAwsAuthConfig = resolveAwsAuthConfig; +const resolveSigV4AuthConfig = (input) => { + const normalizedCreds = input.credentials + ? 
normalizeCredentialProvider(input.credentials) + : input.credentialDefaultProvider(input); + const { signingEscapePath = true, systemClockOffset = input.systemClockOffset || 0, sha256 } = input; + let signer; + if (input.signer) { + signer = (0, util_middleware_1.normalizeProvider)(input.signer); + } + else { + signer = (0, util_middleware_1.normalizeProvider)(new signature_v4_1.SignatureV4({ + credentials: normalizedCreds, + region: input.region, + service: input.signingName, + sha256, + uriEscapePath: signingEscapePath, + })); + } + return { + ...input, + systemClockOffset, + signingEscapePath, + credentials: normalizedCreds, + signer, + }; +}; +exports.resolveSigV4AuthConfig = resolveSigV4AuthConfig; +const normalizeCredentialProvider = (credentials) => { + if (typeof credentials === "function") { + return (0, property_provider_1.memoize)(credentials, (credentials) => credentials.expiration !== undefined && + credentials.expiration.getTime() - Date.now() < CREDENTIAL_EXPIRE_WINDOW, (credentials) => credentials.expiration !== undefined); + } + return (0, util_middleware_1.normalizeProvider)(credentials); +}; + + +/***/ }), + +/***/ 8053: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getSigV4AuthPlugin = exports.getAwsAuthPlugin = exports.awsAuthMiddlewareOptions = exports.awsAuthMiddleware = void 0; +const protocol_http_1 = __nccwpck_require__(4418); +const getSkewCorrectedDate_1 = __nccwpck_require__(8253); +const getUpdatedSystemClockOffset_1 = __nccwpck_require__(5863); +const awsAuthMiddleware = (options) => (next, context) => async function (args) { + var _a, _b, _c, _d; + if (!protocol_http_1.HttpRequest.isInstance(args.request)) + return next(args); + const authScheme = (_c = (_b = (_a = context.endpointV2) === null || _a === void 0 ? void 0 : _a.properties) === null || _b === void 0 ? void 0 : _b.authSchemes) === null || _c === void 0 ? void 0 : _c[0]; + const multiRegionOverride = (authScheme === null || authScheme === void 0 ? void 0 : authScheme.name) === "sigv4a" ? (_d = authScheme === null || authScheme === void 0 ? void 0 : authScheme.signingRegionSet) === null || _d === void 0 ? void 0 : _d.join(",") : undefined; + const signer = await options.signer(authScheme); + const output = await next({ + ...args, + request: await signer.sign(args.request, { + signingDate: (0, getSkewCorrectedDate_1.getSkewCorrectedDate)(options.systemClockOffset), + signingRegion: multiRegionOverride || context["signing_region"], + signingService: context["signing_service"], + }), + }).catch((error) => { + var _a; + const serverTime = (_a = error.ServerTime) !== null && _a !== void 0 ? _a : getDateHeader(error.$response); + if (serverTime) { + options.systemClockOffset = (0, getUpdatedSystemClockOffset_1.getUpdatedSystemClockOffset)(serverTime, options.systemClockOffset); + } + throw error; + }); + const dateHeader = getDateHeader(output.response); + if (dateHeader) { + options.systemClockOffset = (0, getUpdatedSystemClockOffset_1.getUpdatedSystemClockOffset)(dateHeader, options.systemClockOffset); + } + return output; +}; +exports.awsAuthMiddleware = awsAuthMiddleware; +const getDateHeader = (response) => { var _a, _b, _c; return protocol_http_1.HttpResponse.isInstance(response) ? (_b = (_a = response.headers) === null || _a === void 0 ? void 0 : _a.date) !== null && _b !== void 0 ? _b : (_c = response.headers) === null || _c === void 0 ? 
void 0 : _c.Date : undefined; }; +exports.awsAuthMiddlewareOptions = { + name: "awsAuthMiddleware", + tags: ["SIGNATURE", "AWSAUTH"], + relation: "after", + toMiddleware: "retryMiddleware", + override: true, +}; +const getAwsAuthPlugin = (options) => ({ + applyToStack: (clientStack) => { + clientStack.addRelativeTo((0, exports.awsAuthMiddleware)(options), exports.awsAuthMiddlewareOptions); + }, +}); +exports.getAwsAuthPlugin = getAwsAuthPlugin; +exports.getSigV4AuthPlugin = exports.getAwsAuthPlugin; + + +/***/ }), + +/***/ 4935: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(4193), exports); +tslib_1.__exportStar(__nccwpck_require__(8053), exports); + + +/***/ }), + +/***/ 8253: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getSkewCorrectedDate = void 0; +const getSkewCorrectedDate = (systemClockOffset) => new Date(Date.now() + systemClockOffset); +exports.getSkewCorrectedDate = getSkewCorrectedDate; + + +/***/ }), + +/***/ 5863: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getUpdatedSystemClockOffset = void 0; +const isClockSkewed_1 = __nccwpck_require__(5301); +const getUpdatedSystemClockOffset = (clockTime, currentSystemClockOffset) => { + const clockTimeInMs = Date.parse(clockTime); + if ((0, isClockSkewed_1.isClockSkewed)(clockTimeInMs, currentSystemClockOffset)) { + return clockTimeInMs - Date.now(); + } + return currentSystemClockOffset; +}; +exports.getUpdatedSystemClockOffset = getUpdatedSystemClockOffset; + + +/***/ }), + +/***/ 5301: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isClockSkewed = void 0; +const getSkewCorrectedDate_1 = __nccwpck_require__(8253); +const isClockSkewed = (clockTime, systemClockOffset) => Math.abs((0, getSkewCorrectedDate_1.getSkewCorrectedDate)(systemClockOffset).getTime() - clockTime) >= 300000; +exports.isClockSkewed = isClockSkewed; + + +/***/ }), + +/***/ 6546: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveUserAgentConfig = void 0; +function resolveUserAgentConfig(input) { + return { + ...input, + customUserAgent: typeof input.customUserAgent === "string" ? 
[[input.customUserAgent]] : input.customUserAgent, + }; +} +exports.resolveUserAgentConfig = resolveUserAgentConfig; + + +/***/ }), + +/***/ 8025: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.UA_ESCAPE_CHAR = exports.UA_VALUE_ESCAPE_REGEX = exports.UA_NAME_ESCAPE_REGEX = exports.UA_NAME_SEPARATOR = exports.SPACE = exports.X_AMZ_USER_AGENT = exports.USER_AGENT = void 0; +exports.USER_AGENT = "user-agent"; +exports.X_AMZ_USER_AGENT = "x-amz-user-agent"; +exports.SPACE = " "; +exports.UA_NAME_SEPARATOR = "/"; +exports.UA_NAME_ESCAPE_REGEX = /[^\!\$\%\&\'\*\+\-\.\^\_\`\|\~\d\w]/g; +exports.UA_VALUE_ESCAPE_REGEX = /[^\!\$\%\&\'\*\+\-\.\^\_\`\|\~\d\w\#]/g; +exports.UA_ESCAPE_CHAR = "-"; + + +/***/ }), + +/***/ 4688: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(6546), exports); +tslib_1.__exportStar(__nccwpck_require__(6236), exports); + + +/***/ }), + +/***/ 6236: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getUserAgentPlugin = exports.getUserAgentMiddlewareOptions = exports.userAgentMiddleware = void 0; +const util_endpoints_1 = __nccwpck_require__(3350); +const protocol_http_1 = __nccwpck_require__(4418); +const constants_1 = __nccwpck_require__(8025); +const userAgentMiddleware = (options) => (next, context) => async (args) => { + var _a, _b; + const { request } = args; + if (!protocol_http_1.HttpRequest.isInstance(request)) + return next(args); + const { headers } = request; + const userAgent = ((_a = context === null || context === void 0 ? void 0 : context.userAgent) === null || _a === void 0 ? void 0 : _a.map(escapeUserAgent)) || []; + const defaultUserAgent = (await options.defaultUserAgentProvider()).map(escapeUserAgent); + const customUserAgent = ((_b = options === null || options === void 0 ? void 0 : options.customUserAgent) === null || _b === void 0 ? void 0 : _b.map(escapeUserAgent)) || []; + const prefix = (0, util_endpoints_1.getUserAgentPrefix)(); + const sdkUserAgentValue = (prefix ? [prefix] : []) + .concat([...defaultUserAgent, ...userAgent, ...customUserAgent]) + .join(constants_1.SPACE); + const normalUAValue = [ + ...defaultUserAgent.filter((section) => section.startsWith("aws-sdk-")), + ...customUserAgent, + ].join(constants_1.SPACE); + if (options.runtime !== "browser") { + if (normalUAValue) { + headers[constants_1.X_AMZ_USER_AGENT] = headers[constants_1.X_AMZ_USER_AGENT] + ? `${headers[constants_1.USER_AGENT]} ${normalUAValue}` + : normalUAValue; + } + headers[constants_1.USER_AGENT] = sdkUserAgentValue; + } + else { + headers[constants_1.X_AMZ_USER_AGENT] = sdkUserAgentValue; + } + return next({ + ...args, + request, + }); +}; +exports.userAgentMiddleware = userAgentMiddleware; +const escapeUserAgent = (userAgentPair) => { + var _a; + const name = userAgentPair[0] + .split(constants_1.UA_NAME_SEPARATOR) + .map((part) => part.replace(constants_1.UA_NAME_ESCAPE_REGEX, constants_1.UA_ESCAPE_CHAR)) + .join(constants_1.UA_NAME_SEPARATOR); + const version = (_a = userAgentPair[1]) === null || _a === void 0 ? 
void 0 : _a.replace(constants_1.UA_VALUE_ESCAPE_REGEX, constants_1.UA_ESCAPE_CHAR); + const prefixSeparatorIndex = name.indexOf(constants_1.UA_NAME_SEPARATOR); + const prefix = name.substring(0, prefixSeparatorIndex); + let uaName = name.substring(prefixSeparatorIndex + 1); + if (prefix === "api") { + uaName = uaName.toLowerCase(); + } + return [prefix, uaName, version] + .filter((item) => item && item.length > 0) + .reduce((acc, item, index) => { + switch (index) { + case 0: + return item; + case 1: + return `${acc}/${item}`; + default: + return `${acc}#${item}`; + } + }, ""); +}; +exports.getUserAgentMiddlewareOptions = { + name: "getUserAgentMiddleware", + step: "build", + priority: "low", + tags: ["SET_USER_AGENT", "USER_AGENT"], + override: true, +}; +const getUserAgentPlugin = (config) => ({ + applyToStack: (clientStack) => { + clientStack.add((0, exports.userAgentMiddleware)(config), exports.getUserAgentMiddlewareOptions); + }, +}); +exports.getUserAgentPlugin = getUserAgentPlugin; + + +/***/ }), + +/***/ 2664: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.UnsupportedGrantTypeException = exports.UnauthorizedClientException = exports.SlowDownException = exports.SSOOIDCClient = exports.InvalidScopeException = exports.InvalidRequestException = exports.InvalidClientException = exports.InternalServerException = exports.ExpiredTokenException = exports.CreateTokenCommand = exports.AuthorizationPendingException = exports.AccessDeniedException = void 0; +const middleware_host_header_1 = __nccwpck_require__(2545); +const middleware_logger_1 = __nccwpck_require__(14); +const middleware_recursion_detection_1 = __nccwpck_require__(5525); +const middleware_user_agent_1 = __nccwpck_require__(4688); +const config_resolver_1 = __nccwpck_require__(3098); +const middleware_content_length_1 = __nccwpck_require__(2800); +const middleware_endpoint_1 = __nccwpck_require__(2918); +const middleware_retry_1 = __nccwpck_require__(6039); +const smithy_client_1 = __nccwpck_require__(3570); +var resolveClientEndpointParameters = (options) => { + var _a, _b; + return { + ...options, + useDualstackEndpoint: (_a = options.useDualstackEndpoint) !== null && _a !== void 0 ? _a : false, + useFipsEndpoint: (_b = options.useFipsEndpoint) !== null && _b !== void 0 ? 
_b : false, + defaultSigningName: "awsssooidc" + }; +}; +var package_default = { version: "3.387.0" }; +const util_user_agent_node_1 = __nccwpck_require__(8095); +const config_resolver_2 = __nccwpck_require__(3098); +const hash_node_1 = __nccwpck_require__(3081); +const middleware_retry_2 = __nccwpck_require__(6039); +const node_config_provider_1 = __nccwpck_require__(3461); +const node_http_handler_1 = __nccwpck_require__(258); +const util_body_length_node_1 = __nccwpck_require__(8075); +const util_retry_1 = __nccwpck_require__(4902); +const smithy_client_2 = __nccwpck_require__(3570); +const url_parser_1 = __nccwpck_require__(4681); +const util_base64_1 = __nccwpck_require__(5600); +const util_utf8_1 = __nccwpck_require__(1895); +const util_endpoints_1 = __nccwpck_require__(3350); +var p = "required"; +var q = "fn"; +var r = "argv"; +var s = "ref"; +var a = "PartitionResult"; +var b = "tree"; +var c = "error"; +var d = "endpoint"; +var e = { [p]: false, "type": "String" }; +var f = { [p]: true, "default": false, "type": "Boolean" }; +var g = { [s]: "Endpoint" }; +var h = { [q]: "booleanEquals", [r]: [{ [s]: "UseFIPS" }, true] }; +var i = { [q]: "booleanEquals", [r]: [{ [s]: "UseDualStack" }, true] }; +var j = {}; +var k = { [q]: "booleanEquals", [r]: [true, { [q]: "getAttr", [r]: [{ [s]: a }, "supportsFIPS"] }] }; +var l = { [q]: "booleanEquals", [r]: [true, { [q]: "getAttr", [r]: [{ [s]: a }, "supportsDualStack"] }] }; +var m = [g]; +var n = [h]; +var o = [i]; +var _data = { version: "1.0", parameters: { Region: e, UseDualStack: f, UseFIPS: f, Endpoint: e }, rules: [{ conditions: [{ [q]: "aws.partition", [r]: [{ [s]: "Region" }], assign: a }], type: b, rules: [{ conditions: [{ [q]: "isSet", [r]: m }, { [q]: "parseURL", [r]: m, assign: "url" }], type: b, rules: [{ conditions: n, error: "Invalid Configuration: FIPS and custom endpoint are not supported", type: c }, { type: b, rules: [{ conditions: o, error: "Invalid Configuration: Dualstack and custom endpoint are not supported", type: c }, { endpoint: { url: g, properties: j, headers: j }, type: d }] }] }, { conditions: [h, i], type: b, rules: [{ conditions: [k, l], type: b, rules: [{ endpoint: { url: "https://oidc-fips.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: j, headers: j }, type: d }] }, { error: "FIPS and DualStack are enabled, but this partition does not support one or both", type: c }] }, { conditions: n, type: b, rules: [{ conditions: [k], type: b, rules: [{ type: b, rules: [{ endpoint: { url: "https://oidc-fips.{Region}.{PartitionResult#dnsSuffix}", properties: j, headers: j }, type: d }] }] }, { error: "FIPS is enabled but this partition does not support FIPS", type: c }] }, { conditions: o, type: b, rules: [{ conditions: [l], type: b, rules: [{ endpoint: { url: "https://oidc.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: j, headers: j }, type: d }] }, { error: "DualStack is enabled but this partition does not support DualStack", type: c }] }, { endpoint: { url: "https://oidc.{Region}.{PartitionResult#dnsSuffix}", properties: j, headers: j }, type: d }] }] }; +var ruleSet = _data; +var defaultEndpointResolver = (endpointParams, context = {}) => { + return (0, util_endpoints_1.resolveEndpoint)(ruleSet, { + endpointParams, + logger: context.logger + }); +}; +var getRuntimeConfig = (config) => { + var _a, _b, _c, _d, _e, _f, _g, _h, _j; + return ({ + apiVersion: "2019-06-10", + base64Decoder: (_a = config === null || config === void 0 ? void 0 : config.base64Decoder) !== null && _a !== void 0 ? 
_a : util_base64_1.fromBase64, + base64Encoder: (_b = config === null || config === void 0 ? void 0 : config.base64Encoder) !== null && _b !== void 0 ? _b : util_base64_1.toBase64, + disableHostPrefix: (_c = config === null || config === void 0 ? void 0 : config.disableHostPrefix) !== null && _c !== void 0 ? _c : false, + endpointProvider: (_d = config === null || config === void 0 ? void 0 : config.endpointProvider) !== null && _d !== void 0 ? _d : defaultEndpointResolver, + logger: (_e = config === null || config === void 0 ? void 0 : config.logger) !== null && _e !== void 0 ? _e : new smithy_client_2.NoOpLogger(), + serviceId: (_f = config === null || config === void 0 ? void 0 : config.serviceId) !== null && _f !== void 0 ? _f : "SSO OIDC", + urlParser: (_g = config === null || config === void 0 ? void 0 : config.urlParser) !== null && _g !== void 0 ? _g : url_parser_1.parseUrl, + utf8Decoder: (_h = config === null || config === void 0 ? void 0 : config.utf8Decoder) !== null && _h !== void 0 ? _h : util_utf8_1.fromUtf8, + utf8Encoder: (_j = config === null || config === void 0 ? void 0 : config.utf8Encoder) !== null && _j !== void 0 ? _j : util_utf8_1.toUtf8 + }); +}; +const smithy_client_3 = __nccwpck_require__(3570); +const util_defaults_mode_node_1 = __nccwpck_require__(2429); +const smithy_client_4 = __nccwpck_require__(3570); +var getRuntimeConfig2 = (config) => { + var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k; + (0, smithy_client_4.emitWarningIfUnsupportedVersion)(process.version); + const defaultsMode = (0, util_defaults_mode_node_1.resolveDefaultsModeConfig)(config); + const defaultConfigProvider = () => defaultsMode().then(smithy_client_3.loadConfigsForDefaultMode); + const clientSharedValues = getRuntimeConfig(config); + return { + ...clientSharedValues, + ...config, + runtime: "node", + defaultsMode, + bodyLengthChecker: (_a = config === null || config === void 0 ? void 0 : config.bodyLengthChecker) !== null && _a !== void 0 ? _a : util_body_length_node_1.calculateBodyLength, + defaultUserAgentProvider: (_b = config === null || config === void 0 ? void 0 : config.defaultUserAgentProvider) !== null && _b !== void 0 ? _b : (0, util_user_agent_node_1.defaultUserAgent)({ serviceId: clientSharedValues.serviceId, clientVersion: package_default.version }), + maxAttempts: (_c = config === null || config === void 0 ? void 0 : config.maxAttempts) !== null && _c !== void 0 ? _c : (0, node_config_provider_1.loadConfig)(middleware_retry_2.NODE_MAX_ATTEMPT_CONFIG_OPTIONS), + region: (_d = config === null || config === void 0 ? void 0 : config.region) !== null && _d !== void 0 ? _d : (0, node_config_provider_1.loadConfig)(config_resolver_2.NODE_REGION_CONFIG_OPTIONS, config_resolver_2.NODE_REGION_CONFIG_FILE_OPTIONS), + requestHandler: (_e = config === null || config === void 0 ? void 0 : config.requestHandler) !== null && _e !== void 0 ? _e : new node_http_handler_1.NodeHttpHandler(defaultConfigProvider), + retryMode: (_f = config === null || config === void 0 ? void 0 : config.retryMode) !== null && _f !== void 0 ? _f : (0, node_config_provider_1.loadConfig)({ + ...middleware_retry_2.NODE_RETRY_MODE_CONFIG_OPTIONS, + default: async () => (await defaultConfigProvider()).retryMode || util_retry_1.DEFAULT_RETRY_MODE + }), + sha256: (_g = config === null || config === void 0 ? void 0 : config.sha256) !== null && _g !== void 0 ? _g : hash_node_1.Hash.bind(null, "sha256"), + streamCollector: (_h = config === null || config === void 0 ? void 0 : config.streamCollector) !== null && _h !== void 0 ? 
_h : node_http_handler_1.streamCollector, + useDualstackEndpoint: (_j = config === null || config === void 0 ? void 0 : config.useDualstackEndpoint) !== null && _j !== void 0 ? _j : (0, node_config_provider_1.loadConfig)(config_resolver_2.NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS), + useFipsEndpoint: (_k = config === null || config === void 0 ? void 0 : config.useFipsEndpoint) !== null && _k !== void 0 ? _k : (0, node_config_provider_1.loadConfig)(config_resolver_2.NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS) + }; +}; +var SSOOIDCClient = class extends smithy_client_1.Client { + constructor(...[configuration]) { + const _config_0 = getRuntimeConfig2(configuration || {}); + const _config_1 = resolveClientEndpointParameters(_config_0); + const _config_2 = (0, config_resolver_1.resolveRegionConfig)(_config_1); + const _config_3 = (0, middleware_endpoint_1.resolveEndpointConfig)(_config_2); + const _config_4 = (0, middleware_retry_1.resolveRetryConfig)(_config_3); + const _config_5 = (0, middleware_host_header_1.resolveHostHeaderConfig)(_config_4); + const _config_6 = (0, middleware_user_agent_1.resolveUserAgentConfig)(_config_5); + super(_config_6); + this.config = _config_6; + this.middlewareStack.use((0, middleware_retry_1.getRetryPlugin)(this.config)); + this.middlewareStack.use((0, middleware_content_length_1.getContentLengthPlugin)(this.config)); + this.middlewareStack.use((0, middleware_host_header_1.getHostHeaderPlugin)(this.config)); + this.middlewareStack.use((0, middleware_logger_1.getLoggerPlugin)(this.config)); + this.middlewareStack.use((0, middleware_recursion_detection_1.getRecursionDetectionPlugin)(this.config)); + this.middlewareStack.use((0, middleware_user_agent_1.getUserAgentPlugin)(this.config)); + } + destroy() { + super.destroy(); + } +}; +exports.SSOOIDCClient = SSOOIDCClient; +const smithy_client_5 = __nccwpck_require__(3570); +const middleware_endpoint_2 = __nccwpck_require__(2918); +const middleware_serde_1 = __nccwpck_require__(1238); +const smithy_client_6 = __nccwpck_require__(3570); +const protocol_http_1 = __nccwpck_require__(4418); +const smithy_client_7 = __nccwpck_require__(3570); +const smithy_client_8 = __nccwpck_require__(3570); +var SSOOIDCServiceException = class _SSOOIDCServiceException extends smithy_client_8.ServiceException { + constructor(options) { + super(options); + Object.setPrototypeOf(this, _SSOOIDCServiceException.prototype); + } +}; +var AccessDeniedException = class _AccessDeniedException extends SSOOIDCServiceException { + constructor(opts) { + super({ + name: "AccessDeniedException", + $fault: "client", + ...opts + }); + this.name = "AccessDeniedException"; + this.$fault = "client"; + Object.setPrototypeOf(this, _AccessDeniedException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +exports.AccessDeniedException = AccessDeniedException; +var AuthorizationPendingException = class _AuthorizationPendingException extends SSOOIDCServiceException { + constructor(opts) { + super({ + name: "AuthorizationPendingException", + $fault: "client", + ...opts + }); + this.name = "AuthorizationPendingException"; + this.$fault = "client"; + Object.setPrototypeOf(this, _AuthorizationPendingException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +exports.AuthorizationPendingException = AuthorizationPendingException; +var ExpiredTokenException = class _ExpiredTokenException extends SSOOIDCServiceException { + constructor(opts) { + super({ + name: 
"ExpiredTokenException", + $fault: "client", + ...opts + }); + this.name = "ExpiredTokenException"; + this.$fault = "client"; + Object.setPrototypeOf(this, _ExpiredTokenException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +exports.ExpiredTokenException = ExpiredTokenException; +var InternalServerException = class _InternalServerException extends SSOOIDCServiceException { + constructor(opts) { + super({ + name: "InternalServerException", + $fault: "server", + ...opts + }); + this.name = "InternalServerException"; + this.$fault = "server"; + Object.setPrototypeOf(this, _InternalServerException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +exports.InternalServerException = InternalServerException; +var InvalidClientException = class _InvalidClientException extends SSOOIDCServiceException { + constructor(opts) { + super({ + name: "InvalidClientException", + $fault: "client", + ...opts + }); + this.name = "InvalidClientException"; + this.$fault = "client"; + Object.setPrototypeOf(this, _InvalidClientException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +exports.InvalidClientException = InvalidClientException; +var InvalidGrantException = class _InvalidGrantException extends SSOOIDCServiceException { + constructor(opts) { + super({ + name: "InvalidGrantException", + $fault: "client", + ...opts + }); + this.name = "InvalidGrantException"; + this.$fault = "client"; + Object.setPrototypeOf(this, _InvalidGrantException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +var InvalidRequestException = class _InvalidRequestException extends SSOOIDCServiceException { + constructor(opts) { + super({ + name: "InvalidRequestException", + $fault: "client", + ...opts + }); + this.name = "InvalidRequestException"; + this.$fault = "client"; + Object.setPrototypeOf(this, _InvalidRequestException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +exports.InvalidRequestException = InvalidRequestException; +var InvalidScopeException = class _InvalidScopeException extends SSOOIDCServiceException { + constructor(opts) { + super({ + name: "InvalidScopeException", + $fault: "client", + ...opts + }); + this.name = "InvalidScopeException"; + this.$fault = "client"; + Object.setPrototypeOf(this, _InvalidScopeException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +exports.InvalidScopeException = InvalidScopeException; +var SlowDownException = class _SlowDownException extends SSOOIDCServiceException { + constructor(opts) { + super({ + name: "SlowDownException", + $fault: "client", + ...opts + }); + this.name = "SlowDownException"; + this.$fault = "client"; + Object.setPrototypeOf(this, _SlowDownException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +exports.SlowDownException = SlowDownException; +var UnauthorizedClientException = class _UnauthorizedClientException extends SSOOIDCServiceException { + constructor(opts) { + super({ + name: "UnauthorizedClientException", + $fault: "client", + ...opts + }); + this.name = "UnauthorizedClientException"; + this.$fault = "client"; + Object.setPrototypeOf(this, _UnauthorizedClientException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +exports.UnauthorizedClientException = 
UnauthorizedClientException; +var UnsupportedGrantTypeException = class _UnsupportedGrantTypeException extends SSOOIDCServiceException { + constructor(opts) { + super({ + name: "UnsupportedGrantTypeException", + $fault: "client", + ...opts + }); + this.name = "UnsupportedGrantTypeException"; + this.$fault = "client"; + Object.setPrototypeOf(this, _UnsupportedGrantTypeException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +exports.UnsupportedGrantTypeException = UnsupportedGrantTypeException; +var InvalidClientMetadataException = class _InvalidClientMetadataException extends SSOOIDCServiceException { + constructor(opts) { + super({ + name: "InvalidClientMetadataException", + $fault: "client", + ...opts + }); + this.name = "InvalidClientMetadataException"; + this.$fault = "client"; + Object.setPrototypeOf(this, _InvalidClientMetadataException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +var se_CreateTokenCommand = async (input, context) => { + const { hostname, protocol = "https", port, path: basePath } = await context.endpoint(); + const headers = { + "content-type": "application/json" + }; + const resolvedPath = `${(basePath === null || basePath === void 0 ? void 0 : basePath.endsWith("/")) ? basePath.slice(0, -1) : basePath || ""}/token`; + let body; + body = JSON.stringify((0, smithy_client_7.take)(input, { + clientId: [], + clientSecret: [], + code: [], + deviceCode: [], + grantType: [], + redirectUri: [], + refreshToken: [], + scope: (_) => (0, smithy_client_7._json)(_) + })); + return new protocol_http_1.HttpRequest({ + protocol, + hostname, + port, + method: "POST", + headers, + path: resolvedPath, + body + }); +}; +var se_RegisterClientCommand = async (input, context) => { + const { hostname, protocol = "https", port, path: basePath } = await context.endpoint(); + const headers = { + "content-type": "application/json" + }; + const resolvedPath = `${(basePath === null || basePath === void 0 ? void 0 : basePath.endsWith("/")) ? basePath.slice(0, -1) : basePath || ""}/client/register`; + let body; + body = JSON.stringify((0, smithy_client_7.take)(input, { + clientName: [], + clientType: [], + scopes: (_) => (0, smithy_client_7._json)(_) + })); + return new protocol_http_1.HttpRequest({ + protocol, + hostname, + port, + method: "POST", + headers, + path: resolvedPath, + body + }); +}; +var se_StartDeviceAuthorizationCommand = async (input, context) => { + const { hostname, protocol = "https", port, path: basePath } = await context.endpoint(); + const headers = { + "content-type": "application/json" + }; + const resolvedPath = `${(basePath === null || basePath === void 0 ? void 0 : basePath.endsWith("/")) ? 
basePath.slice(0, -1) : basePath || ""}/device_authorization`; + let body; + body = JSON.stringify((0, smithy_client_7.take)(input, { + clientId: [], + clientSecret: [], + startUrl: [] + })); + return new protocol_http_1.HttpRequest({ + protocol, + hostname, + port, + method: "POST", + headers, + path: resolvedPath, + body + }); +}; +var de_CreateTokenCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CreateTokenCommandError(output, context); + } + const contents = (0, smithy_client_7.map)({ + $metadata: deserializeMetadata(output) + }); + const data = (0, smithy_client_7.expectNonNull)((0, smithy_client_7.expectObject)(await parseBody(output.body, context)), "body"); + const doc = (0, smithy_client_7.take)(data, { + accessToken: smithy_client_7.expectString, + expiresIn: smithy_client_7.expectInt32, + idToken: smithy_client_7.expectString, + refreshToken: smithy_client_7.expectString, + tokenType: smithy_client_7.expectString + }); + Object.assign(contents, doc); + return contents; +}; +var de_CreateTokenCommandError = async (output, context) => { + const parsedOutput = { + ...output, + body: await parseErrorBody(output.body, context) + }; + const errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "AccessDeniedException": + case "com.amazonaws.ssooidc#AccessDeniedException": + throw await de_AccessDeniedExceptionRes(parsedOutput, context); + case "AuthorizationPendingException": + case "com.amazonaws.ssooidc#AuthorizationPendingException": + throw await de_AuthorizationPendingExceptionRes(parsedOutput, context); + case "ExpiredTokenException": + case "com.amazonaws.ssooidc#ExpiredTokenException": + throw await de_ExpiredTokenExceptionRes(parsedOutput, context); + case "InternalServerException": + case "com.amazonaws.ssooidc#InternalServerException": + throw await de_InternalServerExceptionRes(parsedOutput, context); + case "InvalidClientException": + case "com.amazonaws.ssooidc#InvalidClientException": + throw await de_InvalidClientExceptionRes(parsedOutput, context); + case "InvalidGrantException": + case "com.amazonaws.ssooidc#InvalidGrantException": + throw await de_InvalidGrantExceptionRes(parsedOutput, context); + case "InvalidRequestException": + case "com.amazonaws.ssooidc#InvalidRequestException": + throw await de_InvalidRequestExceptionRes(parsedOutput, context); + case "InvalidScopeException": + case "com.amazonaws.ssooidc#InvalidScopeException": + throw await de_InvalidScopeExceptionRes(parsedOutput, context); + case "SlowDownException": + case "com.amazonaws.ssooidc#SlowDownException": + throw await de_SlowDownExceptionRes(parsedOutput, context); + case "UnauthorizedClientException": + case "com.amazonaws.ssooidc#UnauthorizedClientException": + throw await de_UnauthorizedClientExceptionRes(parsedOutput, context); + case "UnsupportedGrantTypeException": + case "com.amazonaws.ssooidc#UnsupportedGrantTypeException": + throw await de_UnsupportedGrantTypeExceptionRes(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody, + errorCode + }); + } +}; +var de_RegisterClientCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_RegisterClientCommandError(output, context); + } + const contents = (0, smithy_client_7.map)({ + $metadata: deserializeMetadata(output) + }); + const data = (0, smithy_client_7.expectNonNull)((0, smithy_client_7.expectObject)(await 
parseBody(output.body, context)), "body"); + const doc = (0, smithy_client_7.take)(data, { + authorizationEndpoint: smithy_client_7.expectString, + clientId: smithy_client_7.expectString, + clientIdIssuedAt: smithy_client_7.expectLong, + clientSecret: smithy_client_7.expectString, + clientSecretExpiresAt: smithy_client_7.expectLong, + tokenEndpoint: smithy_client_7.expectString + }); + Object.assign(contents, doc); + return contents; +}; +var de_RegisterClientCommandError = async (output, context) => { + const parsedOutput = { + ...output, + body: await parseErrorBody(output.body, context) + }; + const errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "InternalServerException": + case "com.amazonaws.ssooidc#InternalServerException": + throw await de_InternalServerExceptionRes(parsedOutput, context); + case "InvalidClientMetadataException": + case "com.amazonaws.ssooidc#InvalidClientMetadataException": + throw await de_InvalidClientMetadataExceptionRes(parsedOutput, context); + case "InvalidRequestException": + case "com.amazonaws.ssooidc#InvalidRequestException": + throw await de_InvalidRequestExceptionRes(parsedOutput, context); + case "InvalidScopeException": + case "com.amazonaws.ssooidc#InvalidScopeException": + throw await de_InvalidScopeExceptionRes(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody, + errorCode + }); + } +}; +var de_StartDeviceAuthorizationCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_StartDeviceAuthorizationCommandError(output, context); + } + const contents = (0, smithy_client_7.map)({ + $metadata: deserializeMetadata(output) + }); + const data = (0, smithy_client_7.expectNonNull)((0, smithy_client_7.expectObject)(await parseBody(output.body, context)), "body"); + const doc = (0, smithy_client_7.take)(data, { + deviceCode: smithy_client_7.expectString, + expiresIn: smithy_client_7.expectInt32, + interval: smithy_client_7.expectInt32, + userCode: smithy_client_7.expectString, + verificationUri: smithy_client_7.expectString, + verificationUriComplete: smithy_client_7.expectString + }); + Object.assign(contents, doc); + return contents; +}; +var de_StartDeviceAuthorizationCommandError = async (output, context) => { + const parsedOutput = { + ...output, + body: await parseErrorBody(output.body, context) + }; + const errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "InternalServerException": + case "com.amazonaws.ssooidc#InternalServerException": + throw await de_InternalServerExceptionRes(parsedOutput, context); + case "InvalidClientException": + case "com.amazonaws.ssooidc#InvalidClientException": + throw await de_InvalidClientExceptionRes(parsedOutput, context); + case "InvalidRequestException": + case "com.amazonaws.ssooidc#InvalidRequestException": + throw await de_InvalidRequestExceptionRes(parsedOutput, context); + case "SlowDownException": + case "com.amazonaws.ssooidc#SlowDownException": + throw await de_SlowDownExceptionRes(parsedOutput, context); + case "UnauthorizedClientException": + case "com.amazonaws.ssooidc#UnauthorizedClientException": + throw await de_UnauthorizedClientExceptionRes(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody, + errorCode + }); + } +}; +var throwDefaultError = (0, smithy_client_7.withBaseException)(SSOOIDCServiceException); +var 
de_AccessDeniedExceptionRes = async (parsedOutput, context) => { + const contents = (0, smithy_client_7.map)({}); + const data = parsedOutput.body; + const doc = (0, smithy_client_7.take)(data, { + error: smithy_client_7.expectString, + error_description: smithy_client_7.expectString + }); + Object.assign(contents, doc); + const exception = new AccessDeniedException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, smithy_client_7.decorateServiceException)(exception, parsedOutput.body); +}; +var de_AuthorizationPendingExceptionRes = async (parsedOutput, context) => { + const contents = (0, smithy_client_7.map)({}); + const data = parsedOutput.body; + const doc = (0, smithy_client_7.take)(data, { + error: smithy_client_7.expectString, + error_description: smithy_client_7.expectString + }); + Object.assign(contents, doc); + const exception = new AuthorizationPendingException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, smithy_client_7.decorateServiceException)(exception, parsedOutput.body); +}; +var de_ExpiredTokenExceptionRes = async (parsedOutput, context) => { + const contents = (0, smithy_client_7.map)({}); + const data = parsedOutput.body; + const doc = (0, smithy_client_7.take)(data, { + error: smithy_client_7.expectString, + error_description: smithy_client_7.expectString + }); + Object.assign(contents, doc); + const exception = new ExpiredTokenException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, smithy_client_7.decorateServiceException)(exception, parsedOutput.body); +}; +var de_InternalServerExceptionRes = async (parsedOutput, context) => { + const contents = (0, smithy_client_7.map)({}); + const data = parsedOutput.body; + const doc = (0, smithy_client_7.take)(data, { + error: smithy_client_7.expectString, + error_description: smithy_client_7.expectString + }); + Object.assign(contents, doc); + const exception = new InternalServerException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, smithy_client_7.decorateServiceException)(exception, parsedOutput.body); +}; +var de_InvalidClientExceptionRes = async (parsedOutput, context) => { + const contents = (0, smithy_client_7.map)({}); + const data = parsedOutput.body; + const doc = (0, smithy_client_7.take)(data, { + error: smithy_client_7.expectString, + error_description: smithy_client_7.expectString + }); + Object.assign(contents, doc); + const exception = new InvalidClientException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, smithy_client_7.decorateServiceException)(exception, parsedOutput.body); +}; +var de_InvalidClientMetadataExceptionRes = async (parsedOutput, context) => { + const contents = (0, smithy_client_7.map)({}); + const data = parsedOutput.body; + const doc = (0, smithy_client_7.take)(data, { + error: smithy_client_7.expectString, + error_description: smithy_client_7.expectString + }); + Object.assign(contents, doc); + const exception = new InvalidClientMetadataException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, smithy_client_7.decorateServiceException)(exception, parsedOutput.body); +}; +var de_InvalidGrantExceptionRes = async (parsedOutput, context) => { + const contents = (0, smithy_client_7.map)({}); + const data = parsedOutput.body; + const doc = (0, smithy_client_7.take)(data, { + error: smithy_client_7.expectString, + error_description: smithy_client_7.expectString + }); + 
Object.assign(contents, doc); + const exception = new InvalidGrantException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, smithy_client_7.decorateServiceException)(exception, parsedOutput.body); +}; +var de_InvalidRequestExceptionRes = async (parsedOutput, context) => { + const contents = (0, smithy_client_7.map)({}); + const data = parsedOutput.body; + const doc = (0, smithy_client_7.take)(data, { + error: smithy_client_7.expectString, + error_description: smithy_client_7.expectString + }); + Object.assign(contents, doc); + const exception = new InvalidRequestException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, smithy_client_7.decorateServiceException)(exception, parsedOutput.body); +}; +var de_InvalidScopeExceptionRes = async (parsedOutput, context) => { + const contents = (0, smithy_client_7.map)({}); + const data = parsedOutput.body; + const doc = (0, smithy_client_7.take)(data, { + error: smithy_client_7.expectString, + error_description: smithy_client_7.expectString + }); + Object.assign(contents, doc); + const exception = new InvalidScopeException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, smithy_client_7.decorateServiceException)(exception, parsedOutput.body); +}; +var de_SlowDownExceptionRes = async (parsedOutput, context) => { + const contents = (0, smithy_client_7.map)({}); + const data = parsedOutput.body; + const doc = (0, smithy_client_7.take)(data, { + error: smithy_client_7.expectString, + error_description: smithy_client_7.expectString + }); + Object.assign(contents, doc); + const exception = new SlowDownException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, smithy_client_7.decorateServiceException)(exception, parsedOutput.body); +}; +var de_UnauthorizedClientExceptionRes = async (parsedOutput, context) => { + const contents = (0, smithy_client_7.map)({}); + const data = parsedOutput.body; + const doc = (0, smithy_client_7.take)(data, { + error: smithy_client_7.expectString, + error_description: smithy_client_7.expectString + }); + Object.assign(contents, doc); + const exception = new UnauthorizedClientException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, smithy_client_7.decorateServiceException)(exception, parsedOutput.body); +}; +var de_UnsupportedGrantTypeExceptionRes = async (parsedOutput, context) => { + const contents = (0, smithy_client_7.map)({}); + const data = parsedOutput.body; + const doc = (0, smithy_client_7.take)(data, { + error: smithy_client_7.expectString, + error_description: smithy_client_7.expectString + }); + Object.assign(contents, doc); + const exception = new UnsupportedGrantTypeException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, smithy_client_7.decorateServiceException)(exception, parsedOutput.body); +}; +var deserializeMetadata = (output) => { + var _a, _b; + return ({ + httpStatusCode: output.statusCode, + requestId: (_b = (_a = output.headers["x-amzn-requestid"]) !== null && _a !== void 0 ? _a : output.headers["x-amzn-request-id"]) !== null && _b !== void 0 ? 
_b : output.headers["x-amz-request-id"], + extendedRequestId: output.headers["x-amz-id-2"], + cfId: output.headers["x-amz-cf-id"] + }); +}; +var collectBodyString = (streamBody, context) => (0, smithy_client_7.collectBody)(streamBody, context).then((body) => context.utf8Encoder(body)); +var parseBody = (streamBody, context) => collectBodyString(streamBody, context).then((encoded) => { + if (encoded.length) { + return JSON.parse(encoded); + } + return {}; +}); +var parseErrorBody = async (errorBody, context) => { + var _a; + const value = await parseBody(errorBody, context); + value.message = (_a = value.message) !== null && _a !== void 0 ? _a : value.Message; + return value; +}; +var loadRestJsonErrorCode = (output, data) => { + const findKey = (object, key) => Object.keys(object).find((k2) => k2.toLowerCase() === key.toLowerCase()); + const sanitizeErrorCode = (rawValue) => { + let cleanValue = rawValue; + if (typeof cleanValue === "number") { + cleanValue = cleanValue.toString(); + } + if (cleanValue.indexOf(",") >= 0) { + cleanValue = cleanValue.split(",")[0]; + } + if (cleanValue.indexOf(":") >= 0) { + cleanValue = cleanValue.split(":")[0]; + } + if (cleanValue.indexOf("#") >= 0) { + cleanValue = cleanValue.split("#")[1]; + } + return cleanValue; + }; + const headerKey = findKey(output.headers, "x-amzn-errortype"); + if (headerKey !== void 0) { + return sanitizeErrorCode(output.headers[headerKey]); + } + if (data.code !== void 0) { + return sanitizeErrorCode(data.code); + } + if (data["__type"] !== void 0) { + return sanitizeErrorCode(data["__type"]); + } +}; +var CreateTokenCommand = class _CreateTokenCommand extends smithy_client_6.Command { + constructor(input) { + super(); + this.input = input; + } + static getEndpointParameterInstructions() { + return { + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" } + }; + } + resolveMiddleware(clientStack, configuration, options) { + this.middlewareStack.use((0, middleware_serde_1.getSerdePlugin)(configuration, this.serialize, this.deserialize)); + this.middlewareStack.use((0, middleware_endpoint_2.getEndpointPlugin)(configuration, _CreateTokenCommand.getEndpointParameterInstructions())); + const stack = clientStack.concat(this.middlewareStack); + const { logger } = configuration; + const clientName = "SSOOIDCClient"; + const commandName = "CreateTokenCommand"; + const handlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: (_) => _, + outputFilterSensitiveLog: (_) => _ + }; + const { requestHandler } = configuration; + return stack.resolve((request) => requestHandler.handle(request.request, options || {}), handlerExecutionContext); + } + serialize(input, context) { + return se_CreateTokenCommand(input, context); + } + deserialize(output, context) { + return de_CreateTokenCommand(output, context); + } +}; +exports.CreateTokenCommand = CreateTokenCommand; +const middleware_endpoint_3 = __nccwpck_require__(2918); +const middleware_serde_2 = __nccwpck_require__(1238); +const smithy_client_9 = __nccwpck_require__(3570); +var RegisterClientCommand = class _RegisterClientCommand extends smithy_client_9.Command { + constructor(input) { + super(); + this.input = input; + } + static getEndpointParameterInstructions() { + return { + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + Endpoint: { type: 
"builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" } + }; + } + resolveMiddleware(clientStack, configuration, options) { + this.middlewareStack.use((0, middleware_serde_2.getSerdePlugin)(configuration, this.serialize, this.deserialize)); + this.middlewareStack.use((0, middleware_endpoint_3.getEndpointPlugin)(configuration, _RegisterClientCommand.getEndpointParameterInstructions())); + const stack = clientStack.concat(this.middlewareStack); + const { logger } = configuration; + const clientName = "SSOOIDCClient"; + const commandName = "RegisterClientCommand"; + const handlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: (_) => _, + outputFilterSensitiveLog: (_) => _ + }; + const { requestHandler } = configuration; + return stack.resolve((request) => requestHandler.handle(request.request, options || {}), handlerExecutionContext); + } + serialize(input, context) { + return se_RegisterClientCommand(input, context); + } + deserialize(output, context) { + return de_RegisterClientCommand(output, context); + } +}; +const middleware_endpoint_4 = __nccwpck_require__(2918); +const middleware_serde_3 = __nccwpck_require__(1238); +const smithy_client_10 = __nccwpck_require__(3570); +var StartDeviceAuthorizationCommand = class _StartDeviceAuthorizationCommand extends smithy_client_10.Command { + constructor(input) { + super(); + this.input = input; + } + static getEndpointParameterInstructions() { + return { + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" } + }; + } + resolveMiddleware(clientStack, configuration, options) { + this.middlewareStack.use((0, middleware_serde_3.getSerdePlugin)(configuration, this.serialize, this.deserialize)); + this.middlewareStack.use((0, middleware_endpoint_4.getEndpointPlugin)(configuration, _StartDeviceAuthorizationCommand.getEndpointParameterInstructions())); + const stack = clientStack.concat(this.middlewareStack); + const { logger } = configuration; + const clientName = "SSOOIDCClient"; + const commandName = "StartDeviceAuthorizationCommand"; + const handlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: (_) => _, + outputFilterSensitiveLog: (_) => _ + }; + const { requestHandler } = configuration; + return stack.resolve((request) => requestHandler.handle(request.request, options || {}), handlerExecutionContext); + } + serialize(input, context) { + return se_StartDeviceAuthorizationCommand(input, context); + } + deserialize(output, context) { + return de_StartDeviceAuthorizationCommand(output, context); + } +}; +var commands = { + CreateTokenCommand, + RegisterClientCommand, + StartDeviceAuthorizationCommand +}; +var SSOOIDC = class extends SSOOIDCClient { +}; +(0, smithy_client_5.createAggregatedClient)(commands, SSOOIDC); + + +/***/ }), + +/***/ 2242: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.REFRESH_MESSAGE = exports.EXPIRE_WINDOW_MS = void 0; +exports.EXPIRE_WINDOW_MS = 5 * 60 * 1000; +exports.REFRESH_MESSAGE = `To refresh this SSO session run 'aws sso login' with the corresponding profile.`; + + +/***/ }), + +/***/ 5125: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { 
+ +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.fromSso = void 0; +const property_provider_1 = __nccwpck_require__(9721); +const shared_ini_file_loader_1 = __nccwpck_require__(3507); +const constants_1 = __nccwpck_require__(2242); +const getNewSsoOidcToken_1 = __nccwpck_require__(3601); +const validateTokenExpiry_1 = __nccwpck_require__(8418); +const validateTokenKey_1 = __nccwpck_require__(2488); +const writeSSOTokenToFile_1 = __nccwpck_require__(8552); +const lastRefreshAttemptTime = new Date(0); +const fromSso = (init = {}) => async () => { + const profiles = await (0, shared_ini_file_loader_1.parseKnownFiles)(init); + const profileName = (0, shared_ini_file_loader_1.getProfileName)(init); + const profile = profiles[profileName]; + if (!profile) { + throw new property_provider_1.TokenProviderError(`Profile '${profileName}' could not be found in shared credentials file.`, false); + } + else if (!profile["sso_session"]) { + throw new property_provider_1.TokenProviderError(`Profile '${profileName}' is missing required property 'sso_session'.`); + } + const ssoSessionName = profile["sso_session"]; + const ssoSessions = await (0, shared_ini_file_loader_1.loadSsoSessionData)(init); + const ssoSession = ssoSessions[ssoSessionName]; + if (!ssoSession) { + throw new property_provider_1.TokenProviderError(`Sso session '${ssoSessionName}' could not be found in shared credentials file.`, false); + } + for (const ssoSessionRequiredKey of ["sso_start_url", "sso_region"]) { + if (!ssoSession[ssoSessionRequiredKey]) { + throw new property_provider_1.TokenProviderError(`Sso session '${ssoSessionName}' is missing required property '${ssoSessionRequiredKey}'.`, false); + } + } + const ssoStartUrl = ssoSession["sso_start_url"]; + const ssoRegion = ssoSession["sso_region"]; + let ssoToken; + try { + ssoToken = await (0, shared_ini_file_loader_1.getSSOTokenFromFile)(ssoSessionName); + } + catch (e) { + throw new property_provider_1.TokenProviderError(`The SSO session token associated with profile=${profileName} was not found or is invalid. 
${constants_1.REFRESH_MESSAGE}`, false); + } + (0, validateTokenKey_1.validateTokenKey)("accessToken", ssoToken.accessToken); + (0, validateTokenKey_1.validateTokenKey)("expiresAt", ssoToken.expiresAt); + const { accessToken, expiresAt } = ssoToken; + const existingToken = { token: accessToken, expiration: new Date(expiresAt) }; + if (existingToken.expiration.getTime() - Date.now() > constants_1.EXPIRE_WINDOW_MS) { + return existingToken; + } + if (Date.now() - lastRefreshAttemptTime.getTime() < 30 * 1000) { + (0, validateTokenExpiry_1.validateTokenExpiry)(existingToken); + return existingToken; + } + (0, validateTokenKey_1.validateTokenKey)("clientId", ssoToken.clientId, true); + (0, validateTokenKey_1.validateTokenKey)("clientSecret", ssoToken.clientSecret, true); + (0, validateTokenKey_1.validateTokenKey)("refreshToken", ssoToken.refreshToken, true); + try { + lastRefreshAttemptTime.setTime(Date.now()); + const newSsoOidcToken = await (0, getNewSsoOidcToken_1.getNewSsoOidcToken)(ssoToken, ssoRegion); + (0, validateTokenKey_1.validateTokenKey)("accessToken", newSsoOidcToken.accessToken); + (0, validateTokenKey_1.validateTokenKey)("expiresIn", newSsoOidcToken.expiresIn); + const newTokenExpiration = new Date(Date.now() + newSsoOidcToken.expiresIn * 1000); + try { + await (0, writeSSOTokenToFile_1.writeSSOTokenToFile)(ssoSessionName, { + ...ssoToken, + accessToken: newSsoOidcToken.accessToken, + expiresAt: newTokenExpiration.toISOString(), + refreshToken: newSsoOidcToken.refreshToken, + }); + } + catch (error) { + } + return { + token: newSsoOidcToken.accessToken, + expiration: newTokenExpiration, + }; + } + catch (error) { + (0, validateTokenExpiry_1.validateTokenExpiry)(existingToken); + return existingToken; + } +}; +exports.fromSso = fromSso; + + +/***/ }), + +/***/ 3258: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.fromStatic = void 0; +const property_provider_1 = __nccwpck_require__(9721); +const fromStatic = ({ token }) => async () => { + if (!token || !token.token) { + throw new property_provider_1.TokenProviderError(`Please pass a valid token to fromStatic`, false); + } + return token; +}; +exports.fromStatic = fromStatic; + + +/***/ }), + +/***/ 3601: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getNewSsoOidcToken = void 0; +const client_sso_oidc_node_1 = __nccwpck_require__(2664); +const getSsoOidcClient_1 = __nccwpck_require__(9775); +const getNewSsoOidcToken = (ssoToken, ssoRegion) => { + const ssoOidcClient = (0, getSsoOidcClient_1.getSsoOidcClient)(ssoRegion); + return ssoOidcClient.send(new client_sso_oidc_node_1.CreateTokenCommand({ + clientId: ssoToken.clientId, + clientSecret: ssoToken.clientSecret, + refreshToken: ssoToken.refreshToken, + grantType: "refresh_token", + })); +}; +exports.getNewSsoOidcToken = getNewSsoOidcToken; + + +/***/ }), + +/***/ 9775: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getSsoOidcClient = void 0; +const client_sso_oidc_node_1 = __nccwpck_require__(2664); +const ssoOidcClientsHash = {}; +const getSsoOidcClient = (ssoRegion) => { + if (ssoOidcClientsHash[ssoRegion]) { + return ssoOidcClientsHash[ssoRegion]; + } + const ssoOidcClient = new client_sso_oidc_node_1.SSOOIDCClient({ region: ssoRegion 
}); + ssoOidcClientsHash[ssoRegion] = ssoOidcClient; + return ssoOidcClient; +}; +exports.getSsoOidcClient = getSsoOidcClient; + + +/***/ }), + +/***/ 2843: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(2664), exports); +tslib_1.__exportStar(__nccwpck_require__(5125), exports); +tslib_1.__exportStar(__nccwpck_require__(3258), exports); +tslib_1.__exportStar(__nccwpck_require__(195), exports); + + +/***/ }), + +/***/ 195: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.nodeProvider = void 0; +const property_provider_1 = __nccwpck_require__(9721); +const fromSso_1 = __nccwpck_require__(5125); +const nodeProvider = (init = {}) => (0, property_provider_1.memoize)((0, property_provider_1.chain)((0, fromSso_1.fromSso)(init), async () => { + throw new property_provider_1.TokenProviderError("Could not load token from any providers", false); +}), (token) => token.expiration !== undefined && token.expiration.getTime() - Date.now() < 300000, (token) => token.expiration !== undefined); +exports.nodeProvider = nodeProvider; + + +/***/ }), + +/***/ 8418: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.validateTokenExpiry = void 0; +const property_provider_1 = __nccwpck_require__(9721); +const constants_1 = __nccwpck_require__(2242); +const validateTokenExpiry = (token) => { + if (token.expiration && token.expiration.getTime() < Date.now()) { + throw new property_provider_1.TokenProviderError(`Token is expired. ${constants_1.REFRESH_MESSAGE}`, false); + } +}; +exports.validateTokenExpiry = validateTokenExpiry; + + +/***/ }), + +/***/ 2488: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.validateTokenKey = void 0; +const property_provider_1 = __nccwpck_require__(9721); +const constants_1 = __nccwpck_require__(2242); +const validateTokenKey = (key, value, forRefresh = false) => { + if (typeof value === "undefined") { + throw new property_provider_1.TokenProviderError(`Value not present for '${key}' in SSO Token${forRefresh ? ". Cannot refresh" : ""}. 
${constants_1.REFRESH_MESSAGE}`, false); + } +}; +exports.validateTokenKey = validateTokenKey; + + +/***/ }), + +/***/ 8552: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.writeSSOTokenToFile = void 0; +const shared_ini_file_loader_1 = __nccwpck_require__(3507); +const fs_1 = __nccwpck_require__(7147); +const { writeFile } = fs_1.promises; +const writeSSOTokenToFile = (id, ssoToken) => { + const tokenFilepath = (0, shared_ini_file_loader_1.getSSOTokenFilepath)(id); + const tokenString = JSON.stringify(ssoToken, null, 2); + return writeFile(tokenFilepath, tokenString); +}; +exports.writeSSOTokenToFile = writeSSOTokenToFile; + + +/***/ }), + +/***/ 2562: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 6913: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.HttpAuthLocation = void 0; +var types_1 = __nccwpck_require__(5756); +Object.defineProperty(exports, "HttpAuthLocation", ({ enumerable: true, get: function () { return types_1.HttpAuthLocation; } })); + + +/***/ }), + +/***/ 4994: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 5861: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 6527: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 8470: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 8045: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 7736: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 3268: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 142: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.HostAddressType = void 0; +var HostAddressType; +(function (HostAddressType) { + HostAddressType["AAAA"] = "AAAA"; + HostAddressType["A"] = "A"; +})(HostAddressType = exports.HostAddressType || (exports.HostAddressType = {})); + + +/***/ }), + +/***/ 2338: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 9385: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.EndpointURLScheme = void 0; +var types_1 = __nccwpck_require__(5756); +Object.defineProperty(exports, "EndpointURLScheme", ({ enumerable: true, get: function () { return types_1.EndpointURLScheme; } })); + + +/***/ }), + +/***/ 7521: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ 
value: true })); + + +/***/ }), + +/***/ 4466: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 1821: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 2635: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 1301: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 1268: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 7192: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 640: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(1821), exports); +tslib_1.__exportStar(__nccwpck_require__(2635), exports); +tslib_1.__exportStar(__nccwpck_require__(1301), exports); +tslib_1.__exportStar(__nccwpck_require__(1268), exports); +tslib_1.__exportStar(__nccwpck_require__(7192), exports); + + +/***/ }), + +/***/ 9029: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(2562), exports); +tslib_1.__exportStar(__nccwpck_require__(6913), exports); +tslib_1.__exportStar(__nccwpck_require__(4994), exports); +tslib_1.__exportStar(__nccwpck_require__(5861), exports); +tslib_1.__exportStar(__nccwpck_require__(6527), exports); +tslib_1.__exportStar(__nccwpck_require__(8470), exports); +tslib_1.__exportStar(__nccwpck_require__(8045), exports); +tslib_1.__exportStar(__nccwpck_require__(7736), exports); +tslib_1.__exportStar(__nccwpck_require__(3268), exports); +tslib_1.__exportStar(__nccwpck_require__(142), exports); +tslib_1.__exportStar(__nccwpck_require__(2338), exports); +tslib_1.__exportStar(__nccwpck_require__(9385), exports); +tslib_1.__exportStar(__nccwpck_require__(7521), exports); +tslib_1.__exportStar(__nccwpck_require__(4466), exports); +tslib_1.__exportStar(__nccwpck_require__(640), exports); +tslib_1.__exportStar(__nccwpck_require__(9910), exports); +tslib_1.__exportStar(__nccwpck_require__(6678), exports); +tslib_1.__exportStar(__nccwpck_require__(9931), exports); +tslib_1.__exportStar(__nccwpck_require__(2620), exports); +tslib_1.__exportStar(__nccwpck_require__(9062), exports); +tslib_1.__exportStar(__nccwpck_require__(9546), exports); +tslib_1.__exportStar(__nccwpck_require__(316), exports); +tslib_1.__exportStar(__nccwpck_require__(7835), exports); +tslib_1.__exportStar(__nccwpck_require__(1678), exports); +tslib_1.__exportStar(__nccwpck_require__(3818), exports); +tslib_1.__exportStar(__nccwpck_require__(1991), exports); +tslib_1.__exportStar(__nccwpck_require__(4296), exports); +tslib_1.__exportStar(__nccwpck_require__(9416), exports); +tslib_1.__exportStar(__nccwpck_require__(2772), exports); +tslib_1.__exportStar(__nccwpck_require__(134), exports); 
+tslib_1.__exportStar(__nccwpck_require__(4465), exports); + + +/***/ }), + +/***/ 9910: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 6678: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 9931: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 2620: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 9062: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 9546: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 316: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 7835: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 1678: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 3818: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 1991: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 4296: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 9416: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.RequestHandlerProtocol = void 0; +var types_1 = __nccwpck_require__(5756); +Object.defineProperty(exports, "RequestHandlerProtocol", ({ enumerable: true, get: function () { return types_1.RequestHandlerProtocol; } })); + + +/***/ }), + +/***/ 2772: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 134: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 4465: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 1809: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.debugId = void 0; +exports.debugId = "endpoints"; + + +/***/ }), + +/***/ 7617: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(1809), exports); +tslib_1.__exportStar(__nccwpck_require__(6833), exports); + + +/***/ }), + +/***/ 6833: +/***/ ((__unused_webpack_module, exports) 
=> { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.toDebugString = void 0; +function toDebugString(input) { + if (typeof input !== "object" || input == null) { + return input; + } + if ("ref" in input) { + return `$${toDebugString(input.ref)}`; + } + if ("fn" in input) { + return `${input.fn}(${(input.argv || []).map(toDebugString).join(", ")})`; + } + return JSON.stringify(input, null, 2); +} +exports.toDebugString = toDebugString; + + +/***/ }), + +/***/ 3350: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(7482), exports); +tslib_1.__exportStar(__nccwpck_require__(3442), exports); +tslib_1.__exportStar(__nccwpck_require__(6563), exports); +tslib_1.__exportStar(__nccwpck_require__(7433), exports); + + +/***/ }), + +/***/ 6835: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(8079), exports); +tslib_1.__exportStar(__nccwpck_require__(4711), exports); +tslib_1.__exportStar(__nccwpck_require__(7482), exports); + + +/***/ }), + +/***/ 8079: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isVirtualHostableS3Bucket = void 0; +const isIpAddress_1 = __nccwpck_require__(3442); +const isValidHostLabel_1 = __nccwpck_require__(7373); +const isVirtualHostableS3Bucket = (value, allowSubDomains = false) => { + if (allowSubDomains) { + for (const label of value.split(".")) { + if (!(0, exports.isVirtualHostableS3Bucket)(label)) { + return false; + } + } + return true; + } + if (!(0, isValidHostLabel_1.isValidHostLabel)(value)) { + return false; + } + if (value.length < 3 || value.length > 63) { + return false; + } + if (value !== value.toLowerCase()) { + return false; + } + if ((0, isIpAddress_1.isIpAddress)(value)) { + return false; + } + return true; +}; +exports.isVirtualHostableS3Bucket = isVirtualHostableS3Bucket; + + +/***/ }), + +/***/ 4711: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.parseArn = void 0; +const parseArn = (value) => { + const segments = value.split(":"); + if (segments.length < 6) + return null; + const [arn, partition, service, region, accountId, ...resourceId] = segments; + if (arn !== "arn" || partition === "" || service === "" || resourceId[0] === "") + return null; + return { + partition, + service, + region, + accountId, + resourceId: resourceId[0].includes("/") ? 
resourceId[0].split("/") : resourceId, + }; +}; +exports.parseArn = parseArn; + + +/***/ }), + +/***/ 7482: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getUserAgentPrefix = exports.useDefaultPartitionInfo = exports.setPartitionInfo = exports.partition = void 0; +const tslib_1 = __nccwpck_require__(4351); +const partitions_json_1 = tslib_1.__importDefault(__nccwpck_require__(5367)); +let selectedPartitionsInfo = partitions_json_1.default; +let selectedUserAgentPrefix = ""; +const partition = (value) => { + const { partitions } = selectedPartitionsInfo; + for (const partition of partitions) { + const { regions, outputs } = partition; + for (const [region, regionData] of Object.entries(regions)) { + if (region === value) { + return { + ...outputs, + ...regionData, + }; + } + } + } + for (const partition of partitions) { + const { regionRegex, outputs } = partition; + if (new RegExp(regionRegex).test(value)) { + return { + ...outputs, + }; + } + } + const DEFAULT_PARTITION = partitions.find((partition) => partition.id === "aws"); + if (!DEFAULT_PARTITION) { + throw new Error("Provided region was not found in the partition array or regex," + + " and default partition with id 'aws' doesn't exist."); + } + return { + ...DEFAULT_PARTITION.outputs, + }; +}; +exports.partition = partition; +const setPartitionInfo = (partitionsInfo, userAgentPrefix = "") => { + selectedPartitionsInfo = partitionsInfo; + selectedUserAgentPrefix = userAgentPrefix; +}; +exports.setPartitionInfo = setPartitionInfo; +const useDefaultPartitionInfo = () => { + (0, exports.setPartitionInfo)(partitions_json_1.default, ""); +}; +exports.useDefaultPartitionInfo = useDefaultPartitionInfo; +const getUserAgentPrefix = () => selectedUserAgentPrefix; +exports.getUserAgentPrefix = getUserAgentPrefix; + + +/***/ }), + +/***/ 5370: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.booleanEquals = void 0; +const booleanEquals = (value1, value2) => value1 === value2; +exports.booleanEquals = booleanEquals; + + +/***/ }), + +/***/ 767: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getAttr = void 0; +const types_1 = __nccwpck_require__(7433); +const getAttrPathList_1 = __nccwpck_require__(1844); +const getAttr = (value, path) => (0, getAttrPathList_1.getAttrPathList)(path).reduce((acc, index) => { + if (typeof acc !== "object") { + throw new types_1.EndpointError(`Index '${index}' in '${path}' not found in '${JSON.stringify(value)}'`); + } + else if (Array.isArray(acc)) { + return acc[parseInt(index)]; + } + return acc[index]; +}, value); +exports.getAttr = getAttr; + + +/***/ }), + +/***/ 1844: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getAttrPathList = void 0; +const types_1 = __nccwpck_require__(7433); +const getAttrPathList = (path) => { + const parts = path.split("."); + const pathList = []; + for (const part of parts) { + const squareBracketIndex = part.indexOf("["); + if (squareBracketIndex !== -1) { + if (part.indexOf("]") !== part.length - 1) { + throw new types_1.EndpointError(`Path: '${path}' does not end with ']'`); + } + const arrayIndex = part.slice(squareBracketIndex + 1, -1); + if 
(Number.isNaN(parseInt(arrayIndex))) { + throw new types_1.EndpointError(`Invalid array index: '${arrayIndex}' in path: '${path}'`); + } + if (squareBracketIndex !== 0) { + pathList.push(part.slice(0, squareBracketIndex)); + } + pathList.push(arrayIndex); + } + else { + pathList.push(part); + } + } + return pathList; +}; +exports.getAttrPathList = getAttrPathList; + + +/***/ }), + +/***/ 3188: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.aws = void 0; +const tslib_1 = __nccwpck_require__(4351); +exports.aws = tslib_1.__importStar(__nccwpck_require__(6835)); +tslib_1.__exportStar(__nccwpck_require__(5370), exports); +tslib_1.__exportStar(__nccwpck_require__(767), exports); +tslib_1.__exportStar(__nccwpck_require__(8816), exports); +tslib_1.__exportStar(__nccwpck_require__(7373), exports); +tslib_1.__exportStar(__nccwpck_require__(9692), exports); +tslib_1.__exportStar(__nccwpck_require__(2780), exports); +tslib_1.__exportStar(__nccwpck_require__(5182), exports); +tslib_1.__exportStar(__nccwpck_require__(8305), exports); +tslib_1.__exportStar(__nccwpck_require__(6535), exports); + + +/***/ }), + +/***/ 3442: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isIpAddress = void 0; +const IP_V4_REGEX = new RegExp(`^(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)(?:\\.(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)){3}$`); +const isIpAddress = (value) => IP_V4_REGEX.test(value) || (value.startsWith("[") && value.endsWith("]")); +exports.isIpAddress = isIpAddress; + + +/***/ }), + +/***/ 8816: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isSet = void 0; +const isSet = (value) => value != null; +exports.isSet = isSet; + + +/***/ }), + +/***/ 7373: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isValidHostLabel = void 0; +const VALID_HOST_LABEL_REGEX = new RegExp(`^(?!.*-$)(?!-)[a-zA-Z0-9-]{1,63}$`); +const isValidHostLabel = (value, allowSubDomains = false) => { + if (!allowSubDomains) { + return VALID_HOST_LABEL_REGEX.test(value); + } + const labels = value.split("."); + for (const label of labels) { + if (!(0, exports.isValidHostLabel)(label)) { + return false; + } + } + return true; +}; +exports.isValidHostLabel = isValidHostLabel; + + +/***/ }), + +/***/ 9692: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.not = void 0; +const not = (value) => !value; +exports.not = not; + + +/***/ }), + +/***/ 2780: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.parseURL = void 0; +const types_1 = __nccwpck_require__(9029); +const isIpAddress_1 = __nccwpck_require__(3442); +const DEFAULT_PORTS = { + [types_1.EndpointURLScheme.HTTP]: 80, + [types_1.EndpointURLScheme.HTTPS]: 443, +}; +const parseURL = (value) => { + const whatwgURL = (() => { + try { + if (value instanceof URL) { + return value; + } + if (typeof value === "object" && "hostname" in value) { + const { hostname, port, protocol = "", path = "", query = {} } = value; + const url = new URL(`${protocol}//${hostname}${port ? 
`:${port}` : ""}${path}`); + url.search = Object.entries(query) + .map(([k, v]) => `${k}=${v}`) + .join("&"); + return url; + } + return new URL(value); + } + catch (error) { + return null; + } + })(); + if (!whatwgURL) { + console.error(`Unable to parse ${JSON.stringify(value)} as a whatwg URL.`); + return null; + } + const urlString = whatwgURL.href; + const { host, hostname, pathname, protocol, search } = whatwgURL; + if (search) { + return null; + } + const scheme = protocol.slice(0, -1); + if (!Object.values(types_1.EndpointURLScheme).includes(scheme)) { + return null; + } + const isIp = (0, isIpAddress_1.isIpAddress)(hostname); + const inputContainsDefaultPort = urlString.includes(`${host}:${DEFAULT_PORTS[scheme]}`) || + (typeof value === "string" && value.includes(`${host}:${DEFAULT_PORTS[scheme]}`)); + const authority = `${host}${inputContainsDefaultPort ? `:${DEFAULT_PORTS[scheme]}` : ``}`; + return { + scheme, + authority, + path: pathname, + normalizedPath: pathname.endsWith("/") ? pathname : `${pathname}/`, + isIp, + }; +}; +exports.parseURL = parseURL; + + +/***/ }), + +/***/ 5182: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.stringEquals = void 0; +const stringEquals = (value1, value2) => value1 === value2; +exports.stringEquals = stringEquals; + + +/***/ }), + +/***/ 8305: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.substring = void 0; +const substring = (input, start, stop, reverse) => { + if (start >= stop || input.length < stop) { + return null; + } + if (!reverse) { + return input.substring(start, stop); + } + return input.substring(input.length - stop, input.length - start); +}; +exports.substring = substring; + + +/***/ }), + +/***/ 6535: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.uriEncode = void 0; +const uriEncode = (value) => encodeURIComponent(value).replace(/[!*'()]/g, (c) => `%${c.charCodeAt(0).toString(16).toUpperCase()}`); +exports.uriEncode = uriEncode; + + +/***/ }), + +/***/ 6563: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveEndpoint = void 0; +const debug_1 = __nccwpck_require__(7617); +const types_1 = __nccwpck_require__(7433); +const utils_1 = __nccwpck_require__(1114); +const resolveEndpoint = (ruleSetObject, options) => { + var _a, _b, _c, _d, _e, _f; + const { endpointParams, logger } = options; + const { parameters, rules } = ruleSetObject; + (_b = (_a = options.logger) === null || _a === void 0 ? void 0 : _a.debug) === null || _b === void 0 ? void 0 : _b.call(_a, `${debug_1.debugId} Initial EndpointParams: ${(0, debug_1.toDebugString)(endpointParams)}`); + const paramsWithDefault = Object.entries(parameters) + .filter(([, v]) => v.default != null) + .map(([k, v]) => [k, v.default]); + if (paramsWithDefault.length > 0) { + for (const [paramKey, paramDefaultValue] of paramsWithDefault) { + endpointParams[paramKey] = (_c = endpointParams[paramKey]) !== null && _c !== void 0 ? 
_c : paramDefaultValue; + } + } + const requiredParams = Object.entries(parameters) + .filter(([, v]) => v.required) + .map(([k]) => k); + for (const requiredParam of requiredParams) { + if (endpointParams[requiredParam] == null) { + throw new types_1.EndpointError(`Missing required parameter: '${requiredParam}'`); + } + } + const endpoint = (0, utils_1.evaluateRules)(rules, { endpointParams, logger, referenceRecord: {} }); + if ((_d = options.endpointParams) === null || _d === void 0 ? void 0 : _d.Endpoint) { + try { + const givenEndpoint = new URL(options.endpointParams.Endpoint); + const { protocol, port } = givenEndpoint; + endpoint.url.protocol = protocol; + endpoint.url.port = port; + } + catch (e) { + } + } + (_f = (_e = options.logger) === null || _e === void 0 ? void 0 : _e.debug) === null || _f === void 0 ? void 0 : _f.call(_e, `${debug_1.debugId} Resolved endpoint: ${(0, debug_1.toDebugString)(endpoint)}`); + return endpoint; +}; +exports.resolveEndpoint = resolveEndpoint; + + +/***/ }), + +/***/ 2605: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.EndpointError = void 0; +class EndpointError extends Error { + constructor(message) { + super(message); + this.name = "EndpointError"; + } +} +exports.EndpointError = EndpointError; + + +/***/ }), + +/***/ 1261: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 312: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 6083: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 1767: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 7433: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(2605), exports); +tslib_1.__exportStar(__nccwpck_require__(1261), exports); +tslib_1.__exportStar(__nccwpck_require__(312), exports); +tslib_1.__exportStar(__nccwpck_require__(6083), exports); +tslib_1.__exportStar(__nccwpck_require__(1767), exports); +tslib_1.__exportStar(__nccwpck_require__(1811), exports); + + +/***/ }), + +/***/ 1811: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 5075: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.callFunction = void 0; +const tslib_1 = __nccwpck_require__(4351); +const lib = tslib_1.__importStar(__nccwpck_require__(3188)); +const evaluateExpression_1 = __nccwpck_require__(2980); +const callFunction = ({ fn, argv }, options) => { + const evaluatedArgs = argv.map((arg) => ["boolean", "number"].includes(typeof arg) ? 
arg : (0, evaluateExpression_1.evaluateExpression)(arg, "arg", options)); + return fn.split(".").reduce((acc, key) => acc[key], lib)(...evaluatedArgs); +}; +exports.callFunction = callFunction; + + +/***/ }), + +/***/ 7851: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.evaluateCondition = void 0; +const debug_1 = __nccwpck_require__(7617); +const types_1 = __nccwpck_require__(7433); +const callFunction_1 = __nccwpck_require__(5075); +const evaluateCondition = ({ assign, ...fnArgs }, options) => { + var _a, _b; + if (assign && assign in options.referenceRecord) { + throw new types_1.EndpointError(`'${assign}' is already defined in Reference Record.`); + } + const value = (0, callFunction_1.callFunction)(fnArgs, options); + (_b = (_a = options.logger) === null || _a === void 0 ? void 0 : _a.debug) === null || _b === void 0 ? void 0 : _b.call(_a, debug_1.debugId, `evaluateCondition: ${(0, debug_1.toDebugString)(fnArgs)} = ${(0, debug_1.toDebugString)(value)}`); + return { + result: value === "" ? true : !!value, + ...(assign != null && { toAssign: { name: assign, value } }), + }; +}; +exports.evaluateCondition = evaluateCondition; + + +/***/ }), + +/***/ 1506: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.evaluateConditions = void 0; +const debug_1 = __nccwpck_require__(7617); +const evaluateCondition_1 = __nccwpck_require__(7851); +const evaluateConditions = (conditions = [], options) => { + var _a, _b; + const conditionsReferenceRecord = {}; + for (const condition of conditions) { + const { result, toAssign } = (0, evaluateCondition_1.evaluateCondition)(condition, { + ...options, + referenceRecord: { + ...options.referenceRecord, + ...conditionsReferenceRecord, + }, + }); + if (!result) { + return { result }; + } + if (toAssign) { + conditionsReferenceRecord[toAssign.name] = toAssign.value; + (_b = (_a = options.logger) === null || _a === void 0 ? void 0 : _a.debug) === null || _b === void 0 ? void 0 : _b.call(_a, debug_1.debugId, `assign: ${toAssign.name} := ${(0, debug_1.toDebugString)(toAssign.value)}`); + } + } + return { result: true, referenceRecord: conditionsReferenceRecord }; +}; +exports.evaluateConditions = evaluateConditions; + + +/***/ }), + +/***/ 5324: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.evaluateEndpointRule = void 0; +const debug_1 = __nccwpck_require__(7617); +const evaluateConditions_1 = __nccwpck_require__(1506); +const getEndpointHeaders_1 = __nccwpck_require__(8268); +const getEndpointProperties_1 = __nccwpck_require__(4973); +const getEndpointUrl_1 = __nccwpck_require__(3602); +const evaluateEndpointRule = (endpointRule, options) => { + var _a, _b; + const { conditions, endpoint } = endpointRule; + const { result, referenceRecord } = (0, evaluateConditions_1.evaluateConditions)(conditions, options); + if (!result) { + return; + } + const endpointRuleOptions = { + ...options, + referenceRecord: { ...options.referenceRecord, ...referenceRecord }, + }; + const { url, properties, headers } = endpoint; + (_b = (_a = options.logger) === null || _a === void 0 ? void 0 : _a.debug) === null || _b === void 0 ? 
void 0 : _b.call(_a, debug_1.debugId, `Resolving endpoint from template: ${(0, debug_1.toDebugString)(endpoint)}`); + return { + ...(headers != undefined && { + headers: (0, getEndpointHeaders_1.getEndpointHeaders)(headers, endpointRuleOptions), + }), + ...(properties != undefined && { + properties: (0, getEndpointProperties_1.getEndpointProperties)(properties, endpointRuleOptions), + }), + url: (0, getEndpointUrl_1.getEndpointUrl)(url, endpointRuleOptions), + }; +}; +exports.evaluateEndpointRule = evaluateEndpointRule; + + +/***/ }), + +/***/ 2110: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.evaluateErrorRule = void 0; +const types_1 = __nccwpck_require__(7433); +const evaluateConditions_1 = __nccwpck_require__(1506); +const evaluateExpression_1 = __nccwpck_require__(2980); +const evaluateErrorRule = (errorRule, options) => { + const { conditions, error } = errorRule; + const { result, referenceRecord } = (0, evaluateConditions_1.evaluateConditions)(conditions, options); + if (!result) { + return; + } + throw new types_1.EndpointError((0, evaluateExpression_1.evaluateExpression)(error, "Error", { + ...options, + referenceRecord: { ...options.referenceRecord, ...referenceRecord }, + })); +}; +exports.evaluateErrorRule = evaluateErrorRule; + + +/***/ }), + +/***/ 2980: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.evaluateExpression = void 0; +const types_1 = __nccwpck_require__(7433); +const callFunction_1 = __nccwpck_require__(5075); +const evaluateTemplate_1 = __nccwpck_require__(7535); +const getReferenceValue_1 = __nccwpck_require__(8810); +const evaluateExpression = (obj, keyName, options) => { + if (typeof obj === "string") { + return (0, evaluateTemplate_1.evaluateTemplate)(obj, options); + } + else if (obj["fn"]) { + return (0, callFunction_1.callFunction)(obj, options); + } + else if (obj["ref"]) { + return (0, getReferenceValue_1.getReferenceValue)(obj, options); + } + throw new types_1.EndpointError(`'${keyName}': ${String(obj)} is not a string, function or reference.`); +}; +exports.evaluateExpression = evaluateExpression; + + +/***/ }), + +/***/ 9738: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.evaluateRules = void 0; +const types_1 = __nccwpck_require__(7433); +const evaluateEndpointRule_1 = __nccwpck_require__(5324); +const evaluateErrorRule_1 = __nccwpck_require__(2110); +const evaluateTreeRule_1 = __nccwpck_require__(6587); +const evaluateRules = (rules, options) => { + for (const rule of rules) { + if (rule.type === "endpoint") { + const endpointOrUndefined = (0, evaluateEndpointRule_1.evaluateEndpointRule)(rule, options); + if (endpointOrUndefined) { + return endpointOrUndefined; + } + } + else if (rule.type === "error") { + (0, evaluateErrorRule_1.evaluateErrorRule)(rule, options); + } + else if (rule.type === "tree") { + const endpointOrUndefined = (0, evaluateTreeRule_1.evaluateTreeRule)(rule, options); + if (endpointOrUndefined) { + return endpointOrUndefined; + } + } + else { + throw new types_1.EndpointError(`Unknown endpoint rule: ${rule}`); + } + } + throw new types_1.EndpointError(`Rules evaluation failed`); +}; +exports.evaluateRules = evaluateRules; + + +/***/ }), + +/***/ 7535: +/***/ ((__unused_webpack_module, 
exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.evaluateTemplate = void 0; +const lib_1 = __nccwpck_require__(3188); +const evaluateTemplate = (template, options) => { + const evaluatedTemplateArr = []; + const templateContext = { + ...options.endpointParams, + ...options.referenceRecord, + }; + let currentIndex = 0; + while (currentIndex < template.length) { + const openingBraceIndex = template.indexOf("{", currentIndex); + if (openingBraceIndex === -1) { + evaluatedTemplateArr.push(template.slice(currentIndex)); + break; + } + evaluatedTemplateArr.push(template.slice(currentIndex, openingBraceIndex)); + const closingBraceIndex = template.indexOf("}", openingBraceIndex); + if (closingBraceIndex === -1) { + evaluatedTemplateArr.push(template.slice(openingBraceIndex)); + break; + } + if (template[openingBraceIndex + 1] === "{" && template[closingBraceIndex + 1] === "}") { + evaluatedTemplateArr.push(template.slice(openingBraceIndex + 1, closingBraceIndex)); + currentIndex = closingBraceIndex + 2; + } + const parameterName = template.substring(openingBraceIndex + 1, closingBraceIndex); + if (parameterName.includes("#")) { + const [refName, attrName] = parameterName.split("#"); + evaluatedTemplateArr.push((0, lib_1.getAttr)(templateContext[refName], attrName)); + } + else { + evaluatedTemplateArr.push(templateContext[parameterName]); + } + currentIndex = closingBraceIndex + 1; + } + return evaluatedTemplateArr.join(""); +}; +exports.evaluateTemplate = evaluateTemplate; + + +/***/ }), + +/***/ 6587: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.evaluateTreeRule = void 0; +const evaluateConditions_1 = __nccwpck_require__(1506); +const evaluateRules_1 = __nccwpck_require__(9738); +const evaluateTreeRule = (treeRule, options) => { + const { conditions, rules } = treeRule; + const { result, referenceRecord } = (0, evaluateConditions_1.evaluateConditions)(conditions, options); + if (!result) { + return; + } + return (0, evaluateRules_1.evaluateRules)(rules, { + ...options, + referenceRecord: { ...options.referenceRecord, ...referenceRecord }, + }); +}; +exports.evaluateTreeRule = evaluateTreeRule; + + +/***/ }), + +/***/ 8268: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getEndpointHeaders = void 0; +const types_1 = __nccwpck_require__(7433); +const evaluateExpression_1 = __nccwpck_require__(2980); +const getEndpointHeaders = (headers, options) => Object.entries(headers).reduce((acc, [headerKey, headerVal]) => ({ + ...acc, + [headerKey]: headerVal.map((headerValEntry) => { + const processedExpr = (0, evaluateExpression_1.evaluateExpression)(headerValEntry, "Header value entry", options); + if (typeof processedExpr !== "string") { + throw new types_1.EndpointError(`Header '${headerKey}' value '${processedExpr}' is not a string`); + } + return processedExpr; + }), +}), {}); +exports.getEndpointHeaders = getEndpointHeaders; + + +/***/ }), + +/***/ 4973: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getEndpointProperties = void 0; +const getEndpointProperty_1 = __nccwpck_require__(2978); +const getEndpointProperties = (properties, options) => 
Object.entries(properties).reduce((acc, [propertyKey, propertyVal]) => ({ + ...acc, + [propertyKey]: (0, getEndpointProperty_1.getEndpointProperty)(propertyVal, options), +}), {}); +exports.getEndpointProperties = getEndpointProperties; + + +/***/ }), + +/***/ 2978: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getEndpointProperty = void 0; +const types_1 = __nccwpck_require__(7433); +const evaluateTemplate_1 = __nccwpck_require__(7535); +const getEndpointProperties_1 = __nccwpck_require__(4973); +const getEndpointProperty = (property, options) => { + if (Array.isArray(property)) { + return property.map((propertyEntry) => (0, exports.getEndpointProperty)(propertyEntry, options)); + } + switch (typeof property) { + case "string": + return (0, evaluateTemplate_1.evaluateTemplate)(property, options); + case "object": + if (property === null) { + throw new types_1.EndpointError(`Unexpected endpoint property: ${property}`); + } + return (0, getEndpointProperties_1.getEndpointProperties)(property, options); + case "boolean": + return property; + default: + throw new types_1.EndpointError(`Unexpected endpoint property type: ${typeof property}`); + } +}; +exports.getEndpointProperty = getEndpointProperty; + + +/***/ }), + +/***/ 3602: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getEndpointUrl = void 0; +const types_1 = __nccwpck_require__(7433); +const evaluateExpression_1 = __nccwpck_require__(2980); +const getEndpointUrl = (endpointUrl, options) => { + const expression = (0, evaluateExpression_1.evaluateExpression)(endpointUrl, "Endpoint URL", options); + if (typeof expression === "string") { + try { + return new URL(expression); + } + catch (error) { + console.error(`Failed to construct URL with ${expression}`, error); + throw error; + } + } + throw new types_1.EndpointError(`Endpoint URL must be a string, got ${typeof expression}`); +}; +exports.getEndpointUrl = getEndpointUrl; + + +/***/ }), + +/***/ 8810: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getReferenceValue = void 0; +const getReferenceValue = ({ ref }, options) => { + const referenceRecord = { + ...options.endpointParams, + ...options.referenceRecord, + }; + return referenceRecord[ref]; +}; +exports.getReferenceValue = getReferenceValue; + + +/***/ }), + +/***/ 1114: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(9738), exports); + + +/***/ }), + +/***/ 8095: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.defaultUserAgent = exports.UA_APP_ID_INI_NAME = exports.UA_APP_ID_ENV_NAME = void 0; +const node_config_provider_1 = __nccwpck_require__(3461); +const os_1 = __nccwpck_require__(2037); +const process_1 = __nccwpck_require__(7282); +const is_crt_available_1 = __nccwpck_require__(8390); +exports.UA_APP_ID_ENV_NAME = "AWS_SDK_UA_APP_ID"; +exports.UA_APP_ID_INI_NAME = "sdk-ua-app-id"; +const defaultUserAgent = ({ serviceId, clientVersion }) => { + const sections = [ + ["aws-sdk-js", clientVersion], + 
["ua", "2.0"], + [`os/${(0, os_1.platform)()}`, (0, os_1.release)()], + ["lang/js"], + ["md/nodejs", `${process_1.versions.node}`], + ]; + const crtAvailable = (0, is_crt_available_1.isCrtAvailable)(); + if (crtAvailable) { + sections.push(crtAvailable); + } + if (serviceId) { + sections.push([`api/${serviceId}`, clientVersion]); + } + if (process_1.env.AWS_EXECUTION_ENV) { + sections.push([`exec-env/${process_1.env.AWS_EXECUTION_ENV}`]); + } + const appIdPromise = (0, node_config_provider_1.loadConfig)({ + environmentVariableSelector: (env) => env[exports.UA_APP_ID_ENV_NAME], + configFileSelector: (profile) => profile[exports.UA_APP_ID_INI_NAME], + default: undefined, + })(); + let resolvedUserAgent = undefined; + return async () => { + if (!resolvedUserAgent) { + const appId = await appIdPromise; + resolvedUserAgent = appId ? [...sections, [`app/${appId}`]] : [...sections]; + } + return resolvedUserAgent; + }; +}; +exports.defaultUserAgent = defaultUserAgent; + + +/***/ }), + +/***/ 8390: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isCrtAvailable = void 0; +const isCrtAvailable = () => { + try { + if ( true && __nccwpck_require__(7578)) { + return ["md/crt-avail"]; + } + return null; + } + catch (e) { + return null; + } +}; +exports.isCrtAvailable = isCrtAvailable; + + +/***/ }), + +/***/ 8172: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.toUtf8 = exports.fromUtf8 = void 0; +const pureJs_1 = __nccwpck_require__(1590); +const whatwgEncodingApi_1 = __nccwpck_require__(9215); +const fromUtf8 = (input) => typeof TextEncoder === "function" ? (0, whatwgEncodingApi_1.fromUtf8)(input) : (0, pureJs_1.fromUtf8)(input); +exports.fromUtf8 = fromUtf8; +const toUtf8 = (input) => typeof TextDecoder === "function" ? 
(0, whatwgEncodingApi_1.toUtf8)(input) : (0, pureJs_1.toUtf8)(input); +exports.toUtf8 = toUtf8; + + +/***/ }), + +/***/ 1590: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.toUtf8 = exports.fromUtf8 = void 0; +const fromUtf8 = (input) => { + const bytes = []; + for (let i = 0, len = input.length; i < len; i++) { + const value = input.charCodeAt(i); + if (value < 0x80) { + bytes.push(value); + } + else if (value < 0x800) { + bytes.push((value >> 6) | 0b11000000, (value & 0b111111) | 0b10000000); + } + else if (i + 1 < input.length && (value & 0xfc00) === 0xd800 && (input.charCodeAt(i + 1) & 0xfc00) === 0xdc00) { + const surrogatePair = 0x10000 + ((value & 0b1111111111) << 10) + (input.charCodeAt(++i) & 0b1111111111); + bytes.push((surrogatePair >> 18) | 0b11110000, ((surrogatePair >> 12) & 0b111111) | 0b10000000, ((surrogatePair >> 6) & 0b111111) | 0b10000000, (surrogatePair & 0b111111) | 0b10000000); + } + else { + bytes.push((value >> 12) | 0b11100000, ((value >> 6) & 0b111111) | 0b10000000, (value & 0b111111) | 0b10000000); + } + } + return Uint8Array.from(bytes); +}; +exports.fromUtf8 = fromUtf8; +const toUtf8 = (input) => { + let decoded = ""; + for (let i = 0, len = input.length; i < len; i++) { + const byte = input[i]; + if (byte < 0x80) { + decoded += String.fromCharCode(byte); + } + else if (0b11000000 <= byte && byte < 0b11100000) { + const nextByte = input[++i]; + decoded += String.fromCharCode(((byte & 0b11111) << 6) | (nextByte & 0b111111)); + } + else if (0b11110000 <= byte && byte < 0b101101101) { + const surrogatePair = [byte, input[++i], input[++i], input[++i]]; + const encoded = "%" + surrogatePair.map((byteValue) => byteValue.toString(16)).join("%"); + decoded += decodeURIComponent(encoded); + } + else { + decoded += String.fromCharCode(((byte & 0b1111) << 12) | ((input[++i] & 0b111111) << 6) | (input[++i] & 0b111111)); + } + } + return decoded; +}; +exports.toUtf8 = toUtf8; + + +/***/ }), + +/***/ 9215: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.toUtf8 = exports.fromUtf8 = void 0; +function fromUtf8(input) { + return new TextEncoder().encode(input); +} +exports.fromUtf8 = fromUtf8; +function toUtf8(input) { + return new TextDecoder("utf-8").decode(input); +} +exports.toUtf8 = toUtf8; + + +/***/ }), + +/***/ 3779: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS = exports.DEFAULT_USE_DUALSTACK_ENDPOINT = exports.CONFIG_USE_DUALSTACK_ENDPOINT = exports.ENV_USE_DUALSTACK_ENDPOINT = void 0; +const util_config_provider_1 = __nccwpck_require__(3375); +exports.ENV_USE_DUALSTACK_ENDPOINT = "AWS_USE_DUALSTACK_ENDPOINT"; +exports.CONFIG_USE_DUALSTACK_ENDPOINT = "use_dualstack_endpoint"; +exports.DEFAULT_USE_DUALSTACK_ENDPOINT = false; +exports.NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => (0, util_config_provider_1.booleanSelector)(env, exports.ENV_USE_DUALSTACK_ENDPOINT, util_config_provider_1.SelectorType.ENV), + configFileSelector: (profile) => (0, util_config_provider_1.booleanSelector)(profile, exports.CONFIG_USE_DUALSTACK_ENDPOINT, util_config_provider_1.SelectorType.CONFIG), + default: false, +}; + + +/***/ }), + +/***/ 7994: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) 
=> { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS = exports.DEFAULT_USE_FIPS_ENDPOINT = exports.CONFIG_USE_FIPS_ENDPOINT = exports.ENV_USE_FIPS_ENDPOINT = void 0; +const util_config_provider_1 = __nccwpck_require__(3375); +exports.ENV_USE_FIPS_ENDPOINT = "AWS_USE_FIPS_ENDPOINT"; +exports.CONFIG_USE_FIPS_ENDPOINT = "use_fips_endpoint"; +exports.DEFAULT_USE_FIPS_ENDPOINT = false; +exports.NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => (0, util_config_provider_1.booleanSelector)(env, exports.ENV_USE_FIPS_ENDPOINT, util_config_provider_1.SelectorType.ENV), + configFileSelector: (profile) => (0, util_config_provider_1.booleanSelector)(profile, exports.CONFIG_USE_FIPS_ENDPOINT, util_config_provider_1.SelectorType.CONFIG), + default: false, +}; + + +/***/ }), + +/***/ 8421: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(3779), exports); +tslib_1.__exportStar(__nccwpck_require__(7994), exports); +tslib_1.__exportStar(__nccwpck_require__(7432), exports); +tslib_1.__exportStar(__nccwpck_require__(1892), exports); + + +/***/ }), + +/***/ 7432: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveCustomEndpointsConfig = void 0; +const util_middleware_1 = __nccwpck_require__(2390); +const resolveCustomEndpointsConfig = (input) => { + var _a, _b; + const { endpoint, urlParser } = input; + return { + ...input, + tls: (_a = input.tls) !== null && _a !== void 0 ? _a : true, + endpoint: (0, util_middleware_1.normalizeProvider)(typeof endpoint === "string" ? urlParser(endpoint) : endpoint), + isCustomEndpoint: true, + useDualstackEndpoint: (0, util_middleware_1.normalizeProvider)((_b = input.useDualstackEndpoint) !== null && _b !== void 0 ? _b : false), + }; +}; +exports.resolveCustomEndpointsConfig = resolveCustomEndpointsConfig; + + +/***/ }), + +/***/ 1892: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveEndpointsConfig = void 0; +const util_middleware_1 = __nccwpck_require__(2390); +const getEndpointFromRegion_1 = __nccwpck_require__(8570); +const resolveEndpointsConfig = (input) => { + var _a, _b; + const useDualstackEndpoint = (0, util_middleware_1.normalizeProvider)((_a = input.useDualstackEndpoint) !== null && _a !== void 0 ? _a : false); + const { endpoint, useFipsEndpoint, urlParser } = input; + return { + ...input, + tls: (_b = input.tls) !== null && _b !== void 0 ? _b : true, + endpoint: endpoint + ? (0, util_middleware_1.normalizeProvider)(typeof endpoint === "string" ? 
urlParser(endpoint) : endpoint) + : () => (0, getEndpointFromRegion_1.getEndpointFromRegion)({ ...input, useDualstackEndpoint, useFipsEndpoint }), + isCustomEndpoint: !!endpoint, + useDualstackEndpoint, + }; +}; +exports.resolveEndpointsConfig = resolveEndpointsConfig; + + +/***/ }), + +/***/ 8570: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getEndpointFromRegion = void 0; +const getEndpointFromRegion = async (input) => { + var _a; + const { tls = true } = input; + const region = await input.region(); + const dnsHostRegex = new RegExp(/^([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9-]{0,61}[a-zA-Z0-9])$/); + if (!dnsHostRegex.test(region)) { + throw new Error("Invalid region in client config"); + } + const useDualstackEndpoint = await input.useDualstackEndpoint(); + const useFipsEndpoint = await input.useFipsEndpoint(); + const { hostname } = (_a = (await input.regionInfoProvider(region, { useDualstackEndpoint, useFipsEndpoint }))) !== null && _a !== void 0 ? _a : {}; + if (!hostname) { + throw new Error("Cannot resolve hostname from client config"); + } + return input.urlParser(`${tls ? "https:" : "http:"}//${hostname}`); +}; +exports.getEndpointFromRegion = getEndpointFromRegion; + + +/***/ }), + +/***/ 3098: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(8421), exports); +tslib_1.__exportStar(__nccwpck_require__(221), exports); +tslib_1.__exportStar(__nccwpck_require__(6985), exports); + + +/***/ }), + +/***/ 3898: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.NODE_REGION_CONFIG_FILE_OPTIONS = exports.NODE_REGION_CONFIG_OPTIONS = exports.REGION_INI_NAME = exports.REGION_ENV_NAME = void 0; +exports.REGION_ENV_NAME = "AWS_REGION"; +exports.REGION_INI_NAME = "region"; +exports.NODE_REGION_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => env[exports.REGION_ENV_NAME], + configFileSelector: (profile) => profile[exports.REGION_INI_NAME], + default: () => { + throw new Error("Region is missing"); + }, +}; +exports.NODE_REGION_CONFIG_FILE_OPTIONS = { + preferredFile: "credentials", +}; + + +/***/ }), + +/***/ 9506: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getRealRegion = void 0; +const isFipsRegion_1 = __nccwpck_require__(3870); +const getRealRegion = (region) => (0, isFipsRegion_1.isFipsRegion)(region) + ? ["fips-aws-global", "aws-fips"].includes(region) + ? 
"us-east-1" + : region.replace(/fips-(dkr-|prod-)?|-fips/, "") + : region; +exports.getRealRegion = getRealRegion; + + +/***/ }), + +/***/ 221: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(3898), exports); +tslib_1.__exportStar(__nccwpck_require__(7065), exports); + + +/***/ }), + +/***/ 3870: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isFipsRegion = void 0; +const isFipsRegion = (region) => typeof region === "string" && (region.startsWith("fips-") || region.endsWith("-fips")); +exports.isFipsRegion = isFipsRegion; + + +/***/ }), + +/***/ 7065: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveRegionConfig = void 0; +const getRealRegion_1 = __nccwpck_require__(9506); +const isFipsRegion_1 = __nccwpck_require__(3870); +const resolveRegionConfig = (input) => { + const { region, useFipsEndpoint } = input; + if (!region) { + throw new Error("Region is missing"); + } + return { + ...input, + region: async () => { + if (typeof region === "string") { + return (0, getRealRegion_1.getRealRegion)(region); + } + const providedRegion = await region(); + return (0, getRealRegion_1.getRealRegion)(providedRegion); + }, + useFipsEndpoint: async () => { + const providedRegion = typeof region === "string" ? region : await region(); + if ((0, isFipsRegion_1.isFipsRegion)(providedRegion)) { + return true; + } + return typeof useFipsEndpoint !== "function" ? Promise.resolve(!!useFipsEndpoint) : useFipsEndpoint(); + }, + }; +}; +exports.resolveRegionConfig = resolveRegionConfig; + + +/***/ }), + +/***/ 9814: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 4832: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 9760: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getHostnameFromVariants = void 0; +const getHostnameFromVariants = (variants = [], { useFipsEndpoint, useDualstackEndpoint }) => { + var _a; + return (_a = variants.find(({ tags }) => useFipsEndpoint === tags.includes("fips") && useDualstackEndpoint === tags.includes("dualstack"))) === null || _a === void 0 ? 
void 0 : _a.hostname; +}; +exports.getHostnameFromVariants = getHostnameFromVariants; + + +/***/ }), + +/***/ 7792: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getRegionInfo = void 0; +const getHostnameFromVariants_1 = __nccwpck_require__(9760); +const getResolvedHostname_1 = __nccwpck_require__(1487); +const getResolvedPartition_1 = __nccwpck_require__(4441); +const getResolvedSigningRegion_1 = __nccwpck_require__(2281); +const getRegionInfo = (region, { useFipsEndpoint = false, useDualstackEndpoint = false, signingService, regionHash, partitionHash, }) => { + var _a, _b, _c, _d, _e, _f; + const partition = (0, getResolvedPartition_1.getResolvedPartition)(region, { partitionHash }); + const resolvedRegion = region in regionHash ? region : (_b = (_a = partitionHash[partition]) === null || _a === void 0 ? void 0 : _a.endpoint) !== null && _b !== void 0 ? _b : region; + const hostnameOptions = { useFipsEndpoint, useDualstackEndpoint }; + const regionHostname = (0, getHostnameFromVariants_1.getHostnameFromVariants)((_c = regionHash[resolvedRegion]) === null || _c === void 0 ? void 0 : _c.variants, hostnameOptions); + const partitionHostname = (0, getHostnameFromVariants_1.getHostnameFromVariants)((_d = partitionHash[partition]) === null || _d === void 0 ? void 0 : _d.variants, hostnameOptions); + const hostname = (0, getResolvedHostname_1.getResolvedHostname)(resolvedRegion, { regionHostname, partitionHostname }); + if (hostname === undefined) { + throw new Error(`Endpoint resolution failed for: ${{ resolvedRegion, useFipsEndpoint, useDualstackEndpoint }}`); + } + const signingRegion = (0, getResolvedSigningRegion_1.getResolvedSigningRegion)(hostname, { + signingRegion: (_e = regionHash[resolvedRegion]) === null || _e === void 0 ? void 0 : _e.signingRegion, + regionRegex: partitionHash[partition].regionRegex, + useFipsEndpoint, + }); + return { + partition, + signingService, + hostname, + ...(signingRegion && { signingRegion }), + ...(((_f = regionHash[resolvedRegion]) === null || _f === void 0 ? void 0 : _f.signingService) && { + signingService: regionHash[resolvedRegion].signingService, + }), + }; +}; +exports.getRegionInfo = getRegionInfo; + + +/***/ }), + +/***/ 1487: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getResolvedHostname = void 0; +const getResolvedHostname = (resolvedRegion, { regionHostname, partitionHostname }) => regionHostname + ? regionHostname + : partitionHostname + ? partitionHostname.replace("{region}", resolvedRegion) + : undefined; +exports.getResolvedHostname = getResolvedHostname; + + +/***/ }), + +/***/ 4441: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getResolvedPartition = void 0; +const getResolvedPartition = (region, { partitionHash }) => { var _a; return (_a = Object.keys(partitionHash || {}).find((key) => partitionHash[key].regions.includes(region))) !== null && _a !== void 0 ? 
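+// --- illustration (not part of the generated bundle): getRegionInfo() above resolves a
+// hostname in two steps: getHostnameFromVariants() picks the endpoint variant whose tags
+// match the fips/dualstack flags exactly, preferring a region-level entry over the
+// partition-level one, and getResolvedHostname() substitutes the region into the
+// partition template. With hypothetical hash entries:
+//   partitionHash.aws.variants = [
+//     { hostname: "svc.{region}.amazonaws.com", tags: [] },
+//     { hostname: "svc-fips.{region}.amazonaws.com", tags: ["fips"] },
+//   ]
+//   // region "us-west-2", no flags set     -> "svc.us-west-2.amazonaws.com"
+//   // region "us-west-2", useFipsEndpoint  -> "svc-fips.us-west-2.amazonaws.com"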
_a : "aws"; }; +exports.getResolvedPartition = getResolvedPartition; + + +/***/ }), + +/***/ 2281: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getResolvedSigningRegion = void 0; +const getResolvedSigningRegion = (hostname, { signingRegion, regionRegex, useFipsEndpoint }) => { + if (signingRegion) { + return signingRegion; + } + else if (useFipsEndpoint) { + const regionRegexJs = regionRegex.replace("\\\\", "\\").replace(/^\^/g, "\\.").replace(/\$$/g, "\\."); + const regionRegexmatchArray = hostname.match(regionRegexJs); + if (regionRegexmatchArray) { + return regionRegexmatchArray[0].slice(1, -1); + } + } +}; +exports.getResolvedSigningRegion = getResolvedSigningRegion; + + +/***/ }), + +/***/ 6985: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(9814), exports); +tslib_1.__exportStar(__nccwpck_require__(4832), exports); +tslib_1.__exportStar(__nccwpck_require__(7792), exports); + + +/***/ }), + +/***/ 8044: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Endpoint = void 0; +var Endpoint; +(function (Endpoint) { + Endpoint["IPv4"] = "http://169.254.169.254"; + Endpoint["IPv6"] = "http://[fd00:ec2::254]"; +})(Endpoint = exports.Endpoint || (exports.Endpoint = {})); + + +/***/ }), + +/***/ 7342: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ENDPOINT_CONFIG_OPTIONS = exports.CONFIG_ENDPOINT_NAME = exports.ENV_ENDPOINT_NAME = void 0; +exports.ENV_ENDPOINT_NAME = "AWS_EC2_METADATA_SERVICE_ENDPOINT"; +exports.CONFIG_ENDPOINT_NAME = "ec2_metadata_service_endpoint"; +exports.ENDPOINT_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => env[exports.ENV_ENDPOINT_NAME], + configFileSelector: (profile) => profile[exports.CONFIG_ENDPOINT_NAME], + default: undefined, +}; + + +/***/ }), + +/***/ 991: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.EndpointMode = void 0; +var EndpointMode; +(function (EndpointMode) { + EndpointMode["IPv4"] = "IPv4"; + EndpointMode["IPv6"] = "IPv6"; +})(EndpointMode = exports.EndpointMode || (exports.EndpointMode = {})); + + +/***/ }), + +/***/ 8337: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ENDPOINT_MODE_CONFIG_OPTIONS = exports.CONFIG_ENDPOINT_MODE_NAME = exports.ENV_ENDPOINT_MODE_NAME = void 0; +const EndpointMode_1 = __nccwpck_require__(991); +exports.ENV_ENDPOINT_MODE_NAME = "AWS_EC2_METADATA_SERVICE_ENDPOINT_MODE"; +exports.CONFIG_ENDPOINT_MODE_NAME = "ec2_metadata_service_endpoint_mode"; +exports.ENDPOINT_MODE_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => env[exports.ENV_ENDPOINT_MODE_NAME], + configFileSelector: (profile) => profile[exports.CONFIG_ENDPOINT_MODE_NAME], + default: EndpointMode_1.EndpointMode.IPv4, +}; + + +/***/ }), + +/***/ 9227: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.fromContainerMetadata = exports.ENV_CMDS_AUTH_TOKEN = 
exports.ENV_CMDS_RELATIVE_URI = exports.ENV_CMDS_FULL_URI = void 0; +const property_provider_1 = __nccwpck_require__(9721); +const url_1 = __nccwpck_require__(7310); +const httpRequest_1 = __nccwpck_require__(2199); +const ImdsCredentials_1 = __nccwpck_require__(9040); +const RemoteProviderInit_1 = __nccwpck_require__(8533); +const retry_1 = __nccwpck_require__(1351); +exports.ENV_CMDS_FULL_URI = "AWS_CONTAINER_CREDENTIALS_FULL_URI"; +exports.ENV_CMDS_RELATIVE_URI = "AWS_CONTAINER_CREDENTIALS_RELATIVE_URI"; +exports.ENV_CMDS_AUTH_TOKEN = "AWS_CONTAINER_AUTHORIZATION_TOKEN"; +const fromContainerMetadata = (init = {}) => { + const { timeout, maxRetries } = (0, RemoteProviderInit_1.providerConfigFromInit)(init); + return () => (0, retry_1.retry)(async () => { + const requestOptions = await getCmdsUri(); + const credsResponse = JSON.parse(await requestFromEcsImds(timeout, requestOptions)); + if (!(0, ImdsCredentials_1.isImdsCredentials)(credsResponse)) { + throw new property_provider_1.CredentialsProviderError("Invalid response received from instance metadata service."); + } + return (0, ImdsCredentials_1.fromImdsCredentials)(credsResponse); + }, maxRetries); +}; +exports.fromContainerMetadata = fromContainerMetadata; +const requestFromEcsImds = async (timeout, options) => { + if (process.env[exports.ENV_CMDS_AUTH_TOKEN]) { + options.headers = { + ...options.headers, + Authorization: process.env[exports.ENV_CMDS_AUTH_TOKEN], + }; + } + const buffer = await (0, httpRequest_1.httpRequest)({ + ...options, + timeout, + }); + return buffer.toString(); +}; +const CMDS_IP = "169.254.170.2"; +const GREENGRASS_HOSTS = { + localhost: true, + "127.0.0.1": true, +}; +const GREENGRASS_PROTOCOLS = { + "http:": true, + "https:": true, +}; +const getCmdsUri = async () => { + if (process.env[exports.ENV_CMDS_RELATIVE_URI]) { + return { + hostname: CMDS_IP, + path: process.env[exports.ENV_CMDS_RELATIVE_URI], + }; + } + if (process.env[exports.ENV_CMDS_FULL_URI]) { + const parsed = (0, url_1.parse)(process.env[exports.ENV_CMDS_FULL_URI]); + if (!parsed.hostname || !(parsed.hostname in GREENGRASS_HOSTS)) { + throw new property_provider_1.CredentialsProviderError(`${parsed.hostname} is not a valid container metadata service hostname`, false); + } + if (!parsed.protocol || !(parsed.protocol in GREENGRASS_PROTOCOLS)) { + throw new property_provider_1.CredentialsProviderError(`${parsed.protocol} is not a valid container metadata service protocol`, false); + } + return { + ...parsed, + port: parsed.port ? 
parseInt(parsed.port, 10) : undefined, + }; + } + throw new property_provider_1.CredentialsProviderError("The container metadata credential provider cannot be used unless" + + ` the ${exports.ENV_CMDS_RELATIVE_URI} or ${exports.ENV_CMDS_FULL_URI} environment` + + " variable is set", false); +}; + + +/***/ }), + +/***/ 2207: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.fromInstanceMetadata = void 0; +const property_provider_1 = __nccwpck_require__(9721); +const httpRequest_1 = __nccwpck_require__(2199); +const ImdsCredentials_1 = __nccwpck_require__(9040); +const RemoteProviderInit_1 = __nccwpck_require__(8533); +const retry_1 = __nccwpck_require__(1351); +const getInstanceMetadataEndpoint_1 = __nccwpck_require__(2460); +const staticStabilityProvider_1 = __nccwpck_require__(4035); +const IMDS_PATH = "/latest/meta-data/iam/security-credentials/"; +const IMDS_TOKEN_PATH = "/latest/api/token"; +const fromInstanceMetadata = (init = {}) => (0, staticStabilityProvider_1.staticStabilityProvider)(getInstanceImdsProvider(init), { logger: init.logger }); +exports.fromInstanceMetadata = fromInstanceMetadata; +const getInstanceImdsProvider = (init) => { + let disableFetchToken = false; + const { timeout, maxRetries } = (0, RemoteProviderInit_1.providerConfigFromInit)(init); + const getCredentials = async (maxRetries, options) => { + const profile = (await (0, retry_1.retry)(async () => { + let profile; + try { + profile = await getProfile(options); + } + catch (err) { + if (err.statusCode === 401) { + disableFetchToken = false; + } + throw err; + } + return profile; + }, maxRetries)).trim(); + return (0, retry_1.retry)(async () => { + let creds; + try { + creds = await getCredentialsFromProfile(profile, options); + } + catch (err) { + if (err.statusCode === 401) { + disableFetchToken = false; + } + throw err; + } + return creds; + }, maxRetries); + }; + return async () => { + const endpoint = await (0, getInstanceMetadataEndpoint_1.getInstanceMetadataEndpoint)(); + if (disableFetchToken) { + return getCredentials(maxRetries, { ...endpoint, timeout }); + } + else { + let token; + try { + token = (await getMetadataToken({ ...endpoint, timeout })).toString(); + } + catch (error) { + if ((error === null || error === void 0 ? 
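+// --- illustration (not part of the generated bundle): getCmdsUri() above picks the
+// container credential endpoint from two environment variables.
+// AWS_CONTAINER_CREDENTIALS_RELATIVE_URI is resolved against the fixed ECS address
+// 169.254.170.2, while AWS_CONTAINER_CREDENTIALS_FULL_URI must point at localhost or
+// 127.0.0.1 over http(s); AWS_CONTAINER_AUTHORIZATION_TOKEN, when set, is forwarded as
+// the Authorization header. Hypothetical values:
+//   AWS_CONTAINER_CREDENTIALS_RELATIVE_URI=/v2/credentials/task    -> GET http://169.254.170.2/v2/credentials/task
+//   AWS_CONTAINER_CREDENTIALS_FULL_URI=http://localhost:8080/creds -> GET http://localhost:8080/creds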
void 0 : error.statusCode) === 400) { + throw Object.assign(error, { + message: "EC2 Metadata token request returned error", + }); + } + else if (error.message === "TimeoutError" || [403, 404, 405].includes(error.statusCode)) { + disableFetchToken = true; + } + return getCredentials(maxRetries, { ...endpoint, timeout }); + } + return getCredentials(maxRetries, { + ...endpoint, + headers: { + "x-aws-ec2-metadata-token": token, + }, + timeout, + }); + } + }; +}; +const getMetadataToken = async (options) => (0, httpRequest_1.httpRequest)({ + ...options, + path: IMDS_TOKEN_PATH, + method: "PUT", + headers: { + "x-aws-ec2-metadata-token-ttl-seconds": "21600", + }, +}); +const getProfile = async (options) => (await (0, httpRequest_1.httpRequest)({ ...options, path: IMDS_PATH })).toString(); +const getCredentialsFromProfile = async (profile, options) => { + const credsResponse = JSON.parse((await (0, httpRequest_1.httpRequest)({ + ...options, + path: IMDS_PATH + profile, + })).toString()); + if (!(0, ImdsCredentials_1.isImdsCredentials)(credsResponse)) { + throw new property_provider_1.CredentialsProviderError("Invalid response received from instance metadata service."); + } + return (0, ImdsCredentials_1.fromImdsCredentials)(credsResponse); +}; + + +/***/ }), + +/***/ 7477: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getInstanceMetadataEndpoint = exports.httpRequest = void 0; +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(9227), exports); +tslib_1.__exportStar(__nccwpck_require__(2207), exports); +tslib_1.__exportStar(__nccwpck_require__(8533), exports); +tslib_1.__exportStar(__nccwpck_require__(5036), exports); +var httpRequest_1 = __nccwpck_require__(2199); +Object.defineProperty(exports, "httpRequest", ({ enumerable: true, get: function () { return httpRequest_1.httpRequest; } })); +var getInstanceMetadataEndpoint_1 = __nccwpck_require__(2460); +Object.defineProperty(exports, "getInstanceMetadataEndpoint", ({ enumerable: true, get: function () { return getInstanceMetadataEndpoint_1.getInstanceMetadataEndpoint; } })); + + +/***/ }), + +/***/ 9040: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.fromImdsCredentials = exports.isImdsCredentials = void 0; +const isImdsCredentials = (arg) => Boolean(arg) && + typeof arg === "object" && + typeof arg.AccessKeyId === "string" && + typeof arg.SecretAccessKey === "string" && + typeof arg.Token === "string" && + typeof arg.Expiration === "string"; +exports.isImdsCredentials = isImdsCredentials; +const fromImdsCredentials = (creds) => ({ + accessKeyId: creds.AccessKeyId, + secretAccessKey: creds.SecretAccessKey, + sessionToken: creds.Token, + expiration: new Date(creds.Expiration), +}); +exports.fromImdsCredentials = fromImdsCredentials; + + +/***/ }), + +/***/ 8533: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.providerConfigFromInit = exports.DEFAULT_MAX_RETRIES = exports.DEFAULT_TIMEOUT = void 0; +exports.DEFAULT_TIMEOUT = 1000; +exports.DEFAULT_MAX_RETRIES = 0; +const providerConfigFromInit = ({ maxRetries = exports.DEFAULT_MAX_RETRIES, timeout = exports.DEFAULT_TIMEOUT, }) => ({ maxRetries, timeout }); +exports.providerConfigFromInit = providerConfigFromInit; + + +/***/ }), + +/***/ 2199: +/***/ 
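+// --- illustration (not part of the generated bundle): the instance-metadata flow above
+// is the IMDSv2 handshake, roughly:
+//   PUT {endpoint}/latest/api/token                                     (ttl header: 21600)
+//   GET {endpoint}/latest/meta-data/iam/security-credentials/           -> profile name
+//   GET {endpoint}/latest/meta-data/iam/security-credentials/{profile}  -> JSON credentials
+// with both GETs carrying the token in x-aws-ec2-metadata-token. A timeout or a
+// 403/404/405 on the token request makes the provider fall back to token-less (IMDSv1)
+// requests; a 400 from the token endpoint is surfaced as an error.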
((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.httpRequest = void 0; +const property_provider_1 = __nccwpck_require__(9721); +const buffer_1 = __nccwpck_require__(4300); +const http_1 = __nccwpck_require__(3685); +function httpRequest(options) { + return new Promise((resolve, reject) => { + var _a; + const req = (0, http_1.request)({ + method: "GET", + ...options, + hostname: (_a = options.hostname) === null || _a === void 0 ? void 0 : _a.replace(/^\[(.+)\]$/, "$1"), + }); + req.on("error", (err) => { + reject(Object.assign(new property_provider_1.ProviderError("Unable to connect to instance metadata service"), err)); + req.destroy(); + }); + req.on("timeout", () => { + reject(new property_provider_1.ProviderError("TimeoutError from instance metadata service")); + req.destroy(); + }); + req.on("response", (res) => { + const { statusCode = 400 } = res; + if (statusCode < 200 || 300 <= statusCode) { + reject(Object.assign(new property_provider_1.ProviderError("Error response received from instance metadata service"), { statusCode })); + req.destroy(); + } + const chunks = []; + res.on("data", (chunk) => { + chunks.push(chunk); + }); + res.on("end", () => { + resolve(buffer_1.Buffer.concat(chunks)); + req.destroy(); + }); + }); + req.end(); + }); +} +exports.httpRequest = httpRequest; + + +/***/ }), + +/***/ 1351: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.retry = void 0; +const retry = (toRetry, maxRetries) => { + let promise = toRetry(); + for (let i = 0; i < maxRetries; i++) { + promise = promise.catch(toRetry); + } + return promise; +}; +exports.retry = retry; + + +/***/ }), + +/***/ 5036: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 2666: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getExtendedInstanceMetadataCredentials = void 0; +const STATIC_STABILITY_REFRESH_INTERVAL_SECONDS = 5 * 60; +const STATIC_STABILITY_REFRESH_INTERVAL_JITTER_WINDOW_SECONDS = 5 * 60; +const STATIC_STABILITY_DOC_URL = "https://docs.aws.amazon.com/sdkref/latest/guide/feature-static-credentials.html"; +const getExtendedInstanceMetadataCredentials = (credentials, logger) => { + var _a; + const refreshInterval = STATIC_STABILITY_REFRESH_INTERVAL_SECONDS + + Math.floor(Math.random() * STATIC_STABILITY_REFRESH_INTERVAL_JITTER_WINDOW_SECONDS); + const newExpiration = new Date(Date.now() + refreshInterval * 1000); + logger.warn("Attempting credential expiration extension due to a credential service availability issue. A refresh of these " + + "credentials will be attempted after ${new Date(newExpiration)}.\nFor more information, please visit: " + + STATIC_STABILITY_DOC_URL); + const originalExpiration = (_a = credentials.originalExpiration) !== null && _a !== void 0 ? _a : credentials.expiration; + return { + ...credentials, + ...(originalExpiration ? 
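+// --- illustration (not part of the generated bundle): the retry() helper above
+// (module 1351) retries by chaining catch handlers, so the wrapped function runs at most
+// maxRetries + 1 times with no delay between attempts. With a hypothetical flaky provider:
+//   const flaky = (() => { let n = 0;
+//     return () => (++n < 3 ? Promise.reject(new Error("try again")) : Promise.resolve("ok")); })();
+//   retry(flaky, 2).then(console.log);   // resolves "ok" on the third attempt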
{ originalExpiration } : {}), + expiration: newExpiration, + }; +}; +exports.getExtendedInstanceMetadataCredentials = getExtendedInstanceMetadataCredentials; + + +/***/ }), + +/***/ 2460: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getInstanceMetadataEndpoint = void 0; +const node_config_provider_1 = __nccwpck_require__(3461); +const url_parser_1 = __nccwpck_require__(4681); +const Endpoint_1 = __nccwpck_require__(8044); +const EndpointConfigOptions_1 = __nccwpck_require__(7342); +const EndpointMode_1 = __nccwpck_require__(991); +const EndpointModeConfigOptions_1 = __nccwpck_require__(8337); +const getInstanceMetadataEndpoint = async () => (0, url_parser_1.parseUrl)((await getFromEndpointConfig()) || (await getFromEndpointModeConfig())); +exports.getInstanceMetadataEndpoint = getInstanceMetadataEndpoint; +const getFromEndpointConfig = async () => (0, node_config_provider_1.loadConfig)(EndpointConfigOptions_1.ENDPOINT_CONFIG_OPTIONS)(); +const getFromEndpointModeConfig = async () => { + const endpointMode = await (0, node_config_provider_1.loadConfig)(EndpointModeConfigOptions_1.ENDPOINT_MODE_CONFIG_OPTIONS)(); + switch (endpointMode) { + case EndpointMode_1.EndpointMode.IPv4: + return Endpoint_1.Endpoint.IPv4; + case EndpointMode_1.EndpointMode.IPv6: + return Endpoint_1.Endpoint.IPv6; + default: + throw new Error(`Unsupported endpoint mode: ${endpointMode}.` + ` Select from ${Object.values(EndpointMode_1.EndpointMode)}`); + } +}; + + +/***/ }), + +/***/ 4035: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.staticStabilityProvider = void 0; +const getExtendedInstanceMetadataCredentials_1 = __nccwpck_require__(2666); +const staticStabilityProvider = (provider, options = {}) => { + const logger = (options === null || options === void 0 ? 
void 0 : options.logger) || console; + let pastCredentials; + return async () => { + let credentials; + try { + credentials = await provider(); + if (credentials.expiration && credentials.expiration.getTime() < Date.now()) { + credentials = (0, getExtendedInstanceMetadataCredentials_1.getExtendedInstanceMetadataCredentials)(credentials, logger); + } + } + catch (e) { + if (pastCredentials) { + logger.warn("Credential renew failed: ", e); + credentials = (0, getExtendedInstanceMetadataCredentials_1.getExtendedInstanceMetadataCredentials)(pastCredentials, logger); + } + else { + throw e; + } + } + pastCredentials = credentials; + return credentials; + }; +}; +exports.staticStabilityProvider = staticStabilityProvider; + + +/***/ }), + +/***/ 1014: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.EventStreamCodec = void 0; +const crc32_1 = __nccwpck_require__(7327); +const HeaderMarshaller_1 = __nccwpck_require__(4712); +const splitMessage_1 = __nccwpck_require__(597); +class EventStreamCodec { + constructor(toUtf8, fromUtf8) { + this.headerMarshaller = new HeaderMarshaller_1.HeaderMarshaller(toUtf8, fromUtf8); + this.messageBuffer = []; + this.isEndOfStream = false; + } + feed(message) { + this.messageBuffer.push(this.decode(message)); + } + endOfStream() { + this.isEndOfStream = true; + } + getMessage() { + const message = this.messageBuffer.pop(); + const isEndOfStream = this.isEndOfStream; + return { + getMessage() { + return message; + }, + isEndOfStream() { + return isEndOfStream; + }, + }; + } + getAvailableMessages() { + const messages = this.messageBuffer; + this.messageBuffer = []; + const isEndOfStream = this.isEndOfStream; + return { + getMessages() { + return messages; + }, + isEndOfStream() { + return isEndOfStream; + }, + }; + } + encode({ headers: rawHeaders, body }) { + const headers = this.headerMarshaller.format(rawHeaders); + const length = headers.byteLength + body.byteLength + 16; + const out = new Uint8Array(length); + const view = new DataView(out.buffer, out.byteOffset, out.byteLength); + const checksum = new crc32_1.Crc32(); + view.setUint32(0, length, false); + view.setUint32(4, headers.byteLength, false); + view.setUint32(8, checksum.update(out.subarray(0, 8)).digest(), false); + out.set(headers, 12); + out.set(body, headers.byteLength + 12); + view.setUint32(length - 4, checksum.update(out.subarray(8, length - 4)).digest(), false); + return out; + } + decode(message) { + const { headers, body } = (0, splitMessage_1.splitMessage)(message); + return { headers: this.headerMarshaller.parse(headers), body }; + } + formatHeaders(rawHeaders) { + return this.headerMarshaller.format(rawHeaders); + } +} +exports.EventStreamCodec = EventStreamCodec; + + +/***/ }), + +/***/ 4712: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.HeaderMarshaller = void 0; +const util_hex_encoding_1 = __nccwpck_require__(5364); +const Int64_1 = __nccwpck_require__(6086); +class HeaderMarshaller { + constructor(toUtf8, fromUtf8) { + this.toUtf8 = toUtf8; + this.fromUtf8 = fromUtf8; + } + format(headers) { + const chunks = []; + for (const headerName of Object.keys(headers)) { + const bytes = this.fromUtf8(headerName); + chunks.push(Uint8Array.from([bytes.byteLength]), bytes, this.formatHeaderValue(headers[headerName])); + } + const out = new 
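+// --- note (not part of the generated bundle): staticStabilityProvider() above keeps the
+// last good credentials. If a refresh fails, or the provider returns already-expired
+// credentials, it re-issues them with an expiration pushed out by
+// getExtendedInstanceMetadataCredentials(): now + 5 minutes + up to 5 minutes of random
+// jitter, keeping the original expiration on originalExpiration. Only the very first
+// fetch is allowed to throw. For example, credentials that expired at 12:00 and fail to
+// refresh at 12:01 come back with an expiration somewhere between ~12:06 and ~12:11.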
Uint8Array(chunks.reduce((carry, bytes) => carry + bytes.byteLength, 0)); + let position = 0; + for (const chunk of chunks) { + out.set(chunk, position); + position += chunk.byteLength; + } + return out; + } + formatHeaderValue(header) { + switch (header.type) { + case "boolean": + return Uint8Array.from([header.value ? 0 : 1]); + case "byte": + return Uint8Array.from([2, header.value]); + case "short": + const shortView = new DataView(new ArrayBuffer(3)); + shortView.setUint8(0, 3); + shortView.setInt16(1, header.value, false); + return new Uint8Array(shortView.buffer); + case "integer": + const intView = new DataView(new ArrayBuffer(5)); + intView.setUint8(0, 4); + intView.setInt32(1, header.value, false); + return new Uint8Array(intView.buffer); + case "long": + const longBytes = new Uint8Array(9); + longBytes[0] = 5; + longBytes.set(header.value.bytes, 1); + return longBytes; + case "binary": + const binView = new DataView(new ArrayBuffer(3 + header.value.byteLength)); + binView.setUint8(0, 6); + binView.setUint16(1, header.value.byteLength, false); + const binBytes = new Uint8Array(binView.buffer); + binBytes.set(header.value, 3); + return binBytes; + case "string": + const utf8Bytes = this.fromUtf8(header.value); + const strView = new DataView(new ArrayBuffer(3 + utf8Bytes.byteLength)); + strView.setUint8(0, 7); + strView.setUint16(1, utf8Bytes.byteLength, false); + const strBytes = new Uint8Array(strView.buffer); + strBytes.set(utf8Bytes, 3); + return strBytes; + case "timestamp": + const tsBytes = new Uint8Array(9); + tsBytes[0] = 8; + tsBytes.set(Int64_1.Int64.fromNumber(header.value.valueOf()).bytes, 1); + return tsBytes; + case "uuid": + if (!UUID_PATTERN.test(header.value)) { + throw new Error(`Invalid UUID received: ${header.value}`); + } + const uuidBytes = new Uint8Array(17); + uuidBytes[0] = 9; + uuidBytes.set((0, util_hex_encoding_1.fromHex)(header.value.replace(/\-/g, "")), 1); + return uuidBytes; + } + } + parse(headers) { + const out = {}; + let position = 0; + while (position < headers.byteLength) { + const nameLength = headers.getUint8(position++); + const name = this.toUtf8(new Uint8Array(headers.buffer, headers.byteOffset + position, nameLength)); + position += nameLength; + switch (headers.getUint8(position++)) { + case 0: + out[name] = { + type: BOOLEAN_TAG, + value: true, + }; + break; + case 1: + out[name] = { + type: BOOLEAN_TAG, + value: false, + }; + break; + case 2: + out[name] = { + type: BYTE_TAG, + value: headers.getInt8(position++), + }; + break; + case 3: + out[name] = { + type: SHORT_TAG, + value: headers.getInt16(position, false), + }; + position += 2; + break; + case 4: + out[name] = { + type: INT_TAG, + value: headers.getInt32(position, false), + }; + position += 4; + break; + case 5: + out[name] = { + type: LONG_TAG, + value: new Int64_1.Int64(new Uint8Array(headers.buffer, headers.byteOffset + position, 8)), + }; + position += 8; + break; + case 6: + const binaryLength = headers.getUint16(position, false); + position += 2; + out[name] = { + type: BINARY_TAG, + value: new Uint8Array(headers.buffer, headers.byteOffset + position, binaryLength), + }; + position += binaryLength; + break; + case 7: + const stringLength = headers.getUint16(position, false); + position += 2; + out[name] = { + type: STRING_TAG, + value: this.toUtf8(new Uint8Array(headers.buffer, headers.byteOffset + position, stringLength)), + }; + position += stringLength; + break; + case 8: + out[name] = { + type: TIMESTAMP_TAG, + value: new Date(new Int64_1.Int64(new 
Uint8Array(headers.buffer, headers.byteOffset + position, 8)).valueOf()), + }; + position += 8; + break; + case 9: + const uuidBytes = new Uint8Array(headers.buffer, headers.byteOffset + position, 16); + position += 16; + out[name] = { + type: UUID_TAG, + value: `${(0, util_hex_encoding_1.toHex)(uuidBytes.subarray(0, 4))}-${(0, util_hex_encoding_1.toHex)(uuidBytes.subarray(4, 6))}-${(0, util_hex_encoding_1.toHex)(uuidBytes.subarray(6, 8))}-${(0, util_hex_encoding_1.toHex)(uuidBytes.subarray(8, 10))}-${(0, util_hex_encoding_1.toHex)(uuidBytes.subarray(10))}`, + }; + break; + default: + throw new Error(`Unrecognized header type tag`); + } + } + return out; + } +} +exports.HeaderMarshaller = HeaderMarshaller; +var HEADER_VALUE_TYPE; +(function (HEADER_VALUE_TYPE) { + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["boolTrue"] = 0] = "boolTrue"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["boolFalse"] = 1] = "boolFalse"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["byte"] = 2] = "byte"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["short"] = 3] = "short"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["integer"] = 4] = "integer"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["long"] = 5] = "long"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["byteArray"] = 6] = "byteArray"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["string"] = 7] = "string"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["timestamp"] = 8] = "timestamp"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["uuid"] = 9] = "uuid"; +})(HEADER_VALUE_TYPE || (HEADER_VALUE_TYPE = {})); +const BOOLEAN_TAG = "boolean"; +const BYTE_TAG = "byte"; +const SHORT_TAG = "short"; +const INT_TAG = "integer"; +const LONG_TAG = "long"; +const BINARY_TAG = "binary"; +const STRING_TAG = "string"; +const TIMESTAMP_TAG = "timestamp"; +const UUID_TAG = "uuid"; +const UUID_PATTERN = /^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$/; + + +/***/ }), + +/***/ 6086: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Int64 = void 0; +const util_hex_encoding_1 = __nccwpck_require__(5364); +class Int64 { + constructor(bytes) { + this.bytes = bytes; + if (bytes.byteLength !== 8) { + throw new Error("Int64 buffers must be exactly 8 bytes"); + } + } + static fromNumber(number) { + if (number > 9223372036854776000 || number < -9223372036854776000) { + throw new Error(`${number} is too large (or, if negative, too small) to represent as an Int64`); + } + const bytes = new Uint8Array(8); + for (let i = 7, remaining = Math.abs(Math.round(number)); i > -1 && remaining > 0; i--, remaining /= 256) { + bytes[i] = remaining; + } + if (number < 0) { + negate(bytes); + } + return new Int64(bytes); + } + valueOf() { + const bytes = this.bytes.slice(0); + const negative = bytes[0] & 0b10000000; + if (negative) { + negate(bytes); + } + return parseInt((0, util_hex_encoding_1.toHex)(bytes), 16) * (negative ? 
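+// --- illustration (not part of the generated bundle): each event-stream header is
+// marshalled above as
+//   [ name length (1B) | name (utf8) | type tag (1B) | value ]
+// where the tag doubles as the value for booleans (0 = true, 1 = false), fixed-width
+// values follow directly in big-endian (byte/short/integer/long/timestamp), and
+// binary/string values carry a 2-byte length prefix (uuid is a fixed 16 bytes). A
+// hypothetical header { greeting: { type: "string", value: "hi" } } serializes as
+//   08 "greeting" 07 00 02 "hi"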
-1 : 1); + } + toString() { + return String(this.valueOf()); + } +} +exports.Int64 = Int64; +function negate(bytes) { + for (let i = 0; i < 8; i++) { + bytes[i] ^= 0xff; + } + for (let i = 7; i > -1; i--) { + bytes[i]++; + if (bytes[i] !== 0) + break; + } +} + + +/***/ }), + +/***/ 3684: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 7255: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.MessageDecoderStream = void 0; +class MessageDecoderStream { + constructor(options) { + this.options = options; + } + [Symbol.asyncIterator]() { + return this.asyncIterator(); + } + async *asyncIterator() { + for await (const bytes of this.options.inputStream) { + const decoded = this.options.decoder.decode(bytes); + yield decoded; + } + } +} +exports.MessageDecoderStream = MessageDecoderStream; + + +/***/ }), + +/***/ 2362: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.MessageEncoderStream = void 0; +class MessageEncoderStream { + constructor(options) { + this.options = options; + } + [Symbol.asyncIterator]() { + return this.asyncIterator(); + } + async *asyncIterator() { + for await (const msg of this.options.messageStream) { + const encoded = this.options.encoder.encode(msg); + yield encoded; + } + if (this.options.includeEndFrame) { + yield new Uint8Array(0); + } + } +} +exports.MessageEncoderStream = MessageEncoderStream; + + +/***/ }), + +/***/ 2379: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.SmithyMessageDecoderStream = void 0; +class SmithyMessageDecoderStream { + constructor(options) { + this.options = options; + } + [Symbol.asyncIterator]() { + return this.asyncIterator(); + } + async *asyncIterator() { + for await (const message of this.options.messageStream) { + const deserialized = await this.options.deserializer(message); + if (deserialized === undefined) + continue; + yield deserialized; + } + } +} +exports.SmithyMessageDecoderStream = SmithyMessageDecoderStream; + + +/***/ }), + +/***/ 2484: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.SmithyMessageEncoderStream = void 0; +class SmithyMessageEncoderStream { + constructor(options) { + this.options = options; + } + [Symbol.asyncIterator]() { + return this.asyncIterator(); + } + async *asyncIterator() { + for await (const chunk of this.options.inputStream) { + const payloadBuf = this.options.serializer(chunk); + yield payloadBuf; + } + } +} +exports.SmithyMessageEncoderStream = SmithyMessageEncoderStream; + + +/***/ }), + +/***/ 6459: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(1014), exports); +tslib_1.__exportStar(__nccwpck_require__(4712), exports); +tslib_1.__exportStar(__nccwpck_require__(6086), exports); +tslib_1.__exportStar(__nccwpck_require__(3684), exports); +tslib_1.__exportStar(__nccwpck_require__(7255), exports); +tslib_1.__exportStar(__nccwpck_require__(2362), exports); +tslib_1.__exportStar(__nccwpck_require__(2379), exports); 
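+// --- note (not part of the generated bundle): Int64 above stores a signed 64-bit value
+// as 8 big-endian bytes in two's complement (negate() flips every bit, then adds one with
+// carry). fromNumber()/valueOf() go through a JS number, so magnitudes beyond 2^53 lose
+// precision even though the byte form itself is exact.
+//   Int64.fromNumber(1).bytes       // <00 00 00 00 00 00 00 01>
+//   Int64.fromNumber(-1).bytes      // <ff ff ff ff ff ff ff ff>
+//   Int64.fromNumber(-1).valueOf()  // -1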
+tslib_1.__exportStar(__nccwpck_require__(2484), exports); + + +/***/ }), + +/***/ 597: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.splitMessage = void 0; +const crc32_1 = __nccwpck_require__(7327); +const PRELUDE_MEMBER_LENGTH = 4; +const PRELUDE_LENGTH = PRELUDE_MEMBER_LENGTH * 2; +const CHECKSUM_LENGTH = 4; +const MINIMUM_MESSAGE_LENGTH = PRELUDE_LENGTH + CHECKSUM_LENGTH * 2; +function splitMessage({ byteLength, byteOffset, buffer }) { + if (byteLength < MINIMUM_MESSAGE_LENGTH) { + throw new Error("Provided message too short to accommodate event stream message overhead"); + } + const view = new DataView(buffer, byteOffset, byteLength); + const messageLength = view.getUint32(0, false); + if (byteLength !== messageLength) { + throw new Error("Reported message length does not match received message length"); + } + const headerLength = view.getUint32(PRELUDE_MEMBER_LENGTH, false); + const expectedPreludeChecksum = view.getUint32(PRELUDE_LENGTH, false); + const expectedMessageChecksum = view.getUint32(byteLength - CHECKSUM_LENGTH, false); + const checksummer = new crc32_1.Crc32().update(new Uint8Array(buffer, byteOffset, PRELUDE_LENGTH)); + if (expectedPreludeChecksum !== checksummer.digest()) { + throw new Error(`The prelude checksum specified in the message (${expectedPreludeChecksum}) does not match the calculated CRC32 checksum (${checksummer.digest()})`); + } + checksummer.update(new Uint8Array(buffer, byteOffset + PRELUDE_LENGTH, byteLength - (PRELUDE_LENGTH + CHECKSUM_LENGTH))); + if (expectedMessageChecksum !== checksummer.digest()) { + throw new Error(`The message checksum (${checksummer.digest()}) did not match the expected value of ${expectedMessageChecksum}`); + } + return { + headers: new DataView(buffer, byteOffset + PRELUDE_LENGTH + CHECKSUM_LENGTH, headerLength), + body: new Uint8Array(buffer, byteOffset + PRELUDE_LENGTH + CHECKSUM_LENGTH + headerLength, messageLength - headerLength - (PRELUDE_LENGTH + CHECKSUM_LENGTH + CHECKSUM_LENGTH)), + }; +} +exports.splitMessage = splitMessage; + + +/***/ }), + +/***/ 3081: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Hash = void 0; +const util_buffer_from_1 = __nccwpck_require__(1381); +const util_utf8_1 = __nccwpck_require__(1895); +const buffer_1 = __nccwpck_require__(4300); +const crypto_1 = __nccwpck_require__(6113); +class Hash { + constructor(algorithmIdentifier, secret) { + this.algorithmIdentifier = algorithmIdentifier; + this.secret = secret; + this.reset(); + } + update(toHash, encoding) { + this.hash.update((0, util_utf8_1.toUint8Array)(castSourceData(toHash, encoding))); + } + digest() { + return Promise.resolve(this.hash.digest()); + } + reset() { + this.hash = this.secret + ? 
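+// --- note (not part of the generated bundle): splitMessage() above (and
+// EventStreamCodec.encode() earlier) use the AWS event-stream framing
+//   [ total length (4B BE) | header length (4B BE) | prelude CRC32 (4B) |
+//     headers ... | body ... | message CRC32 (4B) ]
+// so the fixed overhead is 16 bytes. The prelude CRC covers the first 8 bytes, the
+// trailing CRC covers everything from byte 8 up to itself, and a message shorter than 16
+// bytes or whose declared length disagrees with the buffer is rejected.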
(0, crypto_1.createHmac)(this.algorithmIdentifier, castSourceData(this.secret)) + : (0, crypto_1.createHash)(this.algorithmIdentifier); + } +} +exports.Hash = Hash; +function castSourceData(toCast, encoding) { + if (buffer_1.Buffer.isBuffer(toCast)) { + return toCast; + } + if (typeof toCast === "string") { + return (0, util_buffer_from_1.fromString)(toCast, encoding); + } + if (ArrayBuffer.isView(toCast)) { + return (0, util_buffer_from_1.fromArrayBuffer)(toCast.buffer, toCast.byteOffset, toCast.byteLength); + } + return (0, util_buffer_from_1.fromArrayBuffer)(toCast); +} + + +/***/ }), + +/***/ 780: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isArrayBuffer = void 0; +const isArrayBuffer = (arg) => (typeof ArrayBuffer === "function" && arg instanceof ArrayBuffer) || + Object.prototype.toString.call(arg) === "[object ArrayBuffer]"; +exports.isArrayBuffer = isArrayBuffer; + + +/***/ }), + +/***/ 2800: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getContentLengthPlugin = exports.contentLengthMiddlewareOptions = exports.contentLengthMiddleware = void 0; +const protocol_http_1 = __nccwpck_require__(4418); +const CONTENT_LENGTH_HEADER = "content-length"; +function contentLengthMiddleware(bodyLengthChecker) { + return (next) => async (args) => { + const request = args.request; + if (protocol_http_1.HttpRequest.isInstance(request)) { + const { body, headers } = request; + if (body && + Object.keys(headers) + .map((str) => str.toLowerCase()) + .indexOf(CONTENT_LENGTH_HEADER) === -1) { + try { + const length = bodyLengthChecker(body); + request.headers = { + ...request.headers, + [CONTENT_LENGTH_HEADER]: String(length), + }; + } + catch (error) { + } + } + } + return next({ + ...args, + request, + }); + }; +} +exports.contentLengthMiddleware = contentLengthMiddleware; +exports.contentLengthMiddlewareOptions = { + step: "build", + tags: ["SET_CONTENT_LENGTH", "CONTENT_LENGTH"], + name: "contentLengthMiddleware", + override: true, +}; +const getContentLengthPlugin = (options) => ({ + applyToStack: (clientStack) => { + clientStack.add(contentLengthMiddleware(options.bodyLengthChecker), exports.contentLengthMiddlewareOptions); + }, +}); +exports.getContentLengthPlugin = getContentLengthPlugin; + + +/***/ }), + +/***/ 465: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createConfigValueProvider = void 0; +const createConfigValueProvider = (configKey, canonicalEndpointParamKey, config) => { + const configProvider = async () => { + var _a; + const configValue = (_a = config[configKey]) !== null && _a !== void 0 ? _a : config[canonicalEndpointParamKey]; + if (typeof configValue === "function") { + return configValue(); + } + return configValue; + }; + if (configKey === "endpoint" || canonicalEndpointParamKey === "endpoint") { + return async () => { + const endpoint = await configProvider(); + if (endpoint && typeof endpoint === "object") { + if ("url" in endpoint) { + return endpoint.url.href; + } + if ("hostname" in endpoint) { + const { protocol, hostname, port, path } = endpoint; + return `${protocol}//${hostname}${port ? 
":" + port : ""}${path}`; + } + } + return endpoint; + }; + } + return configProvider; +}; +exports.createConfigValueProvider = createConfigValueProvider; + + +/***/ }), + +/***/ 3929: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveParams = exports.getEndpointFromInstructions = void 0; +const service_customizations_1 = __nccwpck_require__(3105); +const createConfigValueProvider_1 = __nccwpck_require__(465); +const getEndpointFromInstructions = async (commandInput, instructionsSupplier, clientConfig, context) => { + const endpointParams = await (0, exports.resolveParams)(commandInput, instructionsSupplier, clientConfig); + if (typeof clientConfig.endpointProvider !== "function") { + throw new Error("config.endpointProvider is not set."); + } + const endpoint = clientConfig.endpointProvider(endpointParams, context); + return endpoint; +}; +exports.getEndpointFromInstructions = getEndpointFromInstructions; +const resolveParams = async (commandInput, instructionsSupplier, clientConfig) => { + var _a; + const endpointParams = {}; + const instructions = ((_a = instructionsSupplier === null || instructionsSupplier === void 0 ? void 0 : instructionsSupplier.getEndpointParameterInstructions) === null || _a === void 0 ? void 0 : _a.call(instructionsSupplier)) || {}; + for (const [name, instruction] of Object.entries(instructions)) { + switch (instruction.type) { + case "staticContextParams": + endpointParams[name] = instruction.value; + break; + case "contextParams": + endpointParams[name] = commandInput[instruction.name]; + break; + case "clientContextParams": + case "builtInParams": + endpointParams[name] = await (0, createConfigValueProvider_1.createConfigValueProvider)(instruction.name, name, clientConfig)(); + break; + default: + throw new Error("Unrecognized endpoint parameter instruction: " + JSON.stringify(instruction)); + } + } + if (Object.keys(instructions).length === 0) { + Object.assign(endpointParams, clientConfig); + } + if (String(clientConfig.serviceId).toLowerCase() === "s3") { + await (0, service_customizations_1.resolveParamsForS3)(endpointParams); + } + return endpointParams; +}; +exports.resolveParams = resolveParams; + + +/***/ }), + +/***/ 890: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(3929), exports); +tslib_1.__exportStar(__nccwpck_require__(8938), exports); + + +/***/ }), + +/***/ 8938: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.toEndpointV1 = void 0; +const url_parser_1 = __nccwpck_require__(4681); +const toEndpointV1 = (endpoint) => { + if (typeof endpoint === "object") { + if ("url" in endpoint) { + return (0, url_parser_1.parseUrl)(endpoint.url); + } + return endpoint; + } + return (0, url_parser_1.parseUrl)(endpoint); +}; +exports.toEndpointV1 = toEndpointV1; + + +/***/ }), + +/***/ 5520: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.endpointMiddleware = void 0; +const getEndpointFromInstructions_1 = __nccwpck_require__(3929); +const endpointMiddleware = ({ config, instructions, }) => { + return (next, context) => async 
(args) => { + var _a, _b; + const endpoint = await (0, getEndpointFromInstructions_1.getEndpointFromInstructions)(args.input, { + getEndpointParameterInstructions() { + return instructions; + }, + }, { ...config }, context); + context.endpointV2 = endpoint; + context.authSchemes = (_a = endpoint.properties) === null || _a === void 0 ? void 0 : _a.authSchemes; + const authScheme = (_b = context.authSchemes) === null || _b === void 0 ? void 0 : _b[0]; + if (authScheme) { + context["signing_region"] = authScheme.signingRegion; + context["signing_service"] = authScheme.signingName; + } + return next({ + ...args, + }); + }; +}; +exports.endpointMiddleware = endpointMiddleware; + + +/***/ }), + +/***/ 1329: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getEndpointPlugin = exports.endpointMiddlewareOptions = void 0; +const middleware_serde_1 = __nccwpck_require__(1238); +const endpointMiddleware_1 = __nccwpck_require__(5520); +exports.endpointMiddlewareOptions = { + step: "serialize", + tags: ["ENDPOINT_PARAMETERS", "ENDPOINT_V2", "ENDPOINT"], + name: "endpointV2Middleware", + override: true, + relation: "before", + toMiddleware: middleware_serde_1.serializerMiddlewareOption.name, +}; +const getEndpointPlugin = (config, instructions) => ({ + applyToStack: (clientStack) => { + clientStack.addRelativeTo((0, endpointMiddleware_1.endpointMiddleware)({ + config, + instructions, + }), exports.endpointMiddlewareOptions); + }, +}); +exports.getEndpointPlugin = getEndpointPlugin; + + +/***/ }), + +/***/ 2918: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(890), exports); +tslib_1.__exportStar(__nccwpck_require__(5520), exports); +tslib_1.__exportStar(__nccwpck_require__(1329), exports); +tslib_1.__exportStar(__nccwpck_require__(4139), exports); +tslib_1.__exportStar(__nccwpck_require__(9720), exports); + + +/***/ }), + +/***/ 4139: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveEndpointConfig = void 0; +const util_middleware_1 = __nccwpck_require__(2390); +const toEndpointV1_1 = __nccwpck_require__(8938); +const resolveEndpointConfig = (input) => { + var _a, _b, _c; + const tls = (_a = input.tls) !== null && _a !== void 0 ? _a : true; + const { endpoint } = input; + const customEndpointProvider = endpoint != null ? async () => (0, toEndpointV1_1.toEndpointV1)(await (0, util_middleware_1.normalizeProvider)(endpoint)()) : undefined; + const isCustomEndpoint = !!endpoint; + return { + ...input, + endpoint: customEndpointProvider, + tls, + isCustomEndpoint, + useDualstackEndpoint: (0, util_middleware_1.normalizeProvider)((_b = input.useDualstackEndpoint) !== null && _b !== void 0 ? _b : false), + useFipsEndpoint: (0, util_middleware_1.normalizeProvider)((_c = input.useFipsEndpoint) !== null && _c !== void 0 ? 
_c : false), + }; +}; +exports.resolveEndpointConfig = resolveEndpointConfig; + + +/***/ }), + +/***/ 3105: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(9194), exports); + + +/***/ }), + +/***/ 9194: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isArnBucketName = exports.isDnsCompatibleBucketName = exports.S3_HOSTNAME_PATTERN = exports.DOT_PATTERN = exports.resolveParamsForS3 = void 0; +const resolveParamsForS3 = async (endpointParams) => { + const bucket = (endpointParams === null || endpointParams === void 0 ? void 0 : endpointParams.Bucket) || ""; + if (typeof endpointParams.Bucket === "string") { + endpointParams.Bucket = bucket.replace(/#/g, encodeURIComponent("#")).replace(/\?/g, encodeURIComponent("?")); + } + if ((0, exports.isArnBucketName)(bucket)) { + if (endpointParams.ForcePathStyle === true) { + throw new Error("Path-style addressing cannot be used with ARN buckets"); + } + } + else if (!(0, exports.isDnsCompatibleBucketName)(bucket) || + (bucket.indexOf(".") !== -1 && !String(endpointParams.Endpoint).startsWith("http:")) || + bucket.toLowerCase() !== bucket || + bucket.length < 3) { + endpointParams.ForcePathStyle = true; + } + if (endpointParams.DisableMultiRegionAccessPoints) { + endpointParams.disableMultiRegionAccessPoints = true; + endpointParams.DisableMRAP = true; + } + return endpointParams; +}; +exports.resolveParamsForS3 = resolveParamsForS3; +const DOMAIN_PATTERN = /^[a-z0-9][a-z0-9\.\-]{1,61}[a-z0-9]$/; +const IP_ADDRESS_PATTERN = /(\d+\.){3}\d+/; +const DOTS_PATTERN = /\.\./; +exports.DOT_PATTERN = /\./; +exports.S3_HOSTNAME_PATTERN = /^(.+\.)?s3(-fips)?(\.dualstack)?[.-]([a-z0-9-]+)\./; +const isDnsCompatibleBucketName = (bucketName) => DOMAIN_PATTERN.test(bucketName) && !IP_ADDRESS_PATTERN.test(bucketName) && !DOTS_PATTERN.test(bucketName); +exports.isDnsCompatibleBucketName = isDnsCompatibleBucketName; +const isArnBucketName = (bucketName) => { + const [arn, partition, service, region, account, typeOrId] = bucketName.split(":"); + const isArn = arn === "arn" && bucketName.split(":").length >= 6; + const isValidArn = [arn, partition, service, account, typeOrId].filter(Boolean).length === 5; + if (isArn && !isValidArn) { + throw new Error(`Invalid ARN: ${bucketName} was an invalid ARN.`); + } + return arn === "arn" && !!partition && !!service && !!account && !!typeOrId; +}; +exports.isArnBucketName = isArnBucketName; + + +/***/ }), + +/***/ 9720: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 155: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AdaptiveRetryStrategy = void 0; +const util_retry_1 = __nccwpck_require__(4902); +const StandardRetryStrategy_1 = __nccwpck_require__(4582); +class AdaptiveRetryStrategy extends StandardRetryStrategy_1.StandardRetryStrategy { + constructor(maxAttemptsProvider, options) { + const { rateLimiter, ...superOptions } = options !== null && options !== void 0 ? options : {}; + super(maxAttemptsProvider, superOptions); + this.rateLimiter = rateLimiter !== null && rateLimiter !== void 0 ? 
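+// --- note (not part of the generated bundle): the S3 customization above decides between
+// virtual-hosted and path-style addressing. ForcePathStyle is switched on when the bucket
+// name cannot serve as a DNS host label set: it fails DOMAIN_PATTERN, contains "..",
+// looks like an IP address, contains a dot while the Endpoint is not plain http, has
+// uppercase characters, or is shorter than 3 characters; ARN-style bucket names must not
+// be combined with path style. Illustrative names:
+//   "my-logs-bucket" -> virtual-hosted addressing allowed
+//   "My_Bucket"      -> path style forced
+//   "backups.2023"   -> path style forced unless the Endpoint starts with "http:"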
rateLimiter : new util_retry_1.DefaultRateLimiter(); + this.mode = util_retry_1.RETRY_MODES.ADAPTIVE; + } + async retry(next, args) { + return super.retry(next, args, { + beforeRequest: async () => { + return this.rateLimiter.getSendToken(); + }, + afterRequest: (response) => { + this.rateLimiter.updateClientSendingRate(response); + }, + }); + } +} +exports.AdaptiveRetryStrategy = AdaptiveRetryStrategy; + + +/***/ }), + +/***/ 4582: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.StandardRetryStrategy = void 0; +const protocol_http_1 = __nccwpck_require__(4418); +const service_error_classification_1 = __nccwpck_require__(6375); +const util_retry_1 = __nccwpck_require__(4902); +const uuid_1 = __nccwpck_require__(5840); +const defaultRetryQuota_1 = __nccwpck_require__(9991); +const delayDecider_1 = __nccwpck_require__(9465); +const retryDecider_1 = __nccwpck_require__(7653); +const util_1 = __nccwpck_require__(2827); +class StandardRetryStrategy { + constructor(maxAttemptsProvider, options) { + var _a, _b, _c; + this.maxAttemptsProvider = maxAttemptsProvider; + this.mode = util_retry_1.RETRY_MODES.STANDARD; + this.retryDecider = (_a = options === null || options === void 0 ? void 0 : options.retryDecider) !== null && _a !== void 0 ? _a : retryDecider_1.defaultRetryDecider; + this.delayDecider = (_b = options === null || options === void 0 ? void 0 : options.delayDecider) !== null && _b !== void 0 ? _b : delayDecider_1.defaultDelayDecider; + this.retryQuota = (_c = options === null || options === void 0 ? void 0 : options.retryQuota) !== null && _c !== void 0 ? _c : (0, defaultRetryQuota_1.getDefaultRetryQuota)(util_retry_1.INITIAL_RETRY_TOKENS); + } + shouldRetry(error, attempts, maxAttempts) { + return attempts < maxAttempts && this.retryDecider(error) && this.retryQuota.hasRetryTokens(error); + } + async getMaxAttempts() { + let maxAttempts; + try { + maxAttempts = await this.maxAttemptsProvider(); + } + catch (error) { + maxAttempts = util_retry_1.DEFAULT_MAX_ATTEMPTS; + } + return maxAttempts; + } + async retry(next, args, options) { + let retryTokenAmount; + let attempts = 0; + let totalDelay = 0; + const maxAttempts = await this.getMaxAttempts(); + const { request } = args; + if (protocol_http_1.HttpRequest.isInstance(request)) { + request.headers[util_retry_1.INVOCATION_ID_HEADER] = (0, uuid_1.v4)(); + } + while (true) { + try { + if (protocol_http_1.HttpRequest.isInstance(request)) { + request.headers[util_retry_1.REQUEST_HEADER] = `attempt=${attempts + 1}; max=${maxAttempts}`; + } + if (options === null || options === void 0 ? void 0 : options.beforeRequest) { + await options.beforeRequest(); + } + const { response, output } = await next(args); + if (options === null || options === void 0 ? void 0 : options.afterRequest) { + options.afterRequest(response); + } + this.retryQuota.releaseRetryTokens(retryTokenAmount); + output.$metadata.attempts = attempts + 1; + output.$metadata.totalRetryDelay = totalDelay; + return { response, output }; + } + catch (e) { + const err = (0, util_1.asSdkError)(e); + attempts++; + if (this.shouldRetry(err, attempts, maxAttempts)) { + retryTokenAmount = this.retryQuota.retrieveRetryTokens(err); + const delayFromDecider = this.delayDecider((0, service_error_classification_1.isThrottlingError)(err) ? 
util_retry_1.THROTTLING_RETRY_DELAY_BASE : util_retry_1.DEFAULT_RETRY_DELAY_BASE, attempts); + const delayFromResponse = getDelayFromRetryAfterHeader(err.$response); + const delay = Math.max(delayFromResponse || 0, delayFromDecider); + totalDelay += delay; + await new Promise((resolve) => setTimeout(resolve, delay)); + continue; + } + if (!err.$metadata) { + err.$metadata = {}; + } + err.$metadata.attempts = attempts; + err.$metadata.totalRetryDelay = totalDelay; + throw err; + } + } + } +} +exports.StandardRetryStrategy = StandardRetryStrategy; +const getDelayFromRetryAfterHeader = (response) => { + if (!protocol_http_1.HttpResponse.isInstance(response)) + return; + const retryAfterHeaderName = Object.keys(response.headers).find((key) => key.toLowerCase() === "retry-after"); + if (!retryAfterHeaderName) + return; + const retryAfter = response.headers[retryAfterHeaderName]; + const retryAfterSeconds = Number(retryAfter); + if (!Number.isNaN(retryAfterSeconds)) + return retryAfterSeconds * 1000; + const retryAfterDate = new Date(retryAfter); + return retryAfterDate.getTime() - Date.now(); +}; + + +/***/ }), + +/***/ 8709: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.NODE_RETRY_MODE_CONFIG_OPTIONS = exports.CONFIG_RETRY_MODE = exports.ENV_RETRY_MODE = exports.resolveRetryConfig = exports.NODE_MAX_ATTEMPT_CONFIG_OPTIONS = exports.CONFIG_MAX_ATTEMPTS = exports.ENV_MAX_ATTEMPTS = void 0; +const util_middleware_1 = __nccwpck_require__(2390); +const util_retry_1 = __nccwpck_require__(4902); +exports.ENV_MAX_ATTEMPTS = "AWS_MAX_ATTEMPTS"; +exports.CONFIG_MAX_ATTEMPTS = "max_attempts"; +exports.NODE_MAX_ATTEMPT_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => { + const value = env[exports.ENV_MAX_ATTEMPTS]; + if (!value) + return undefined; + const maxAttempt = parseInt(value); + if (Number.isNaN(maxAttempt)) { + throw new Error(`Environment variable ${exports.ENV_MAX_ATTEMPTS} must be a number, got "${value}"`); + } + return maxAttempt; + }, + configFileSelector: (profile) => { + const value = profile[exports.CONFIG_MAX_ATTEMPTS]; + if (!value) + return undefined; + const maxAttempt = parseInt(value); + if (Number.isNaN(maxAttempt)) { + throw new Error(`Shared config file entry ${exports.CONFIG_MAX_ATTEMPTS} must be a number, got "${value}"`); + } + return maxAttempt; + }, + default: util_retry_1.DEFAULT_MAX_ATTEMPTS, +}; +const resolveRetryConfig = (input) => { + var _a; + const { retryStrategy } = input; + const maxAttempts = (0, util_middleware_1.normalizeProvider)((_a = input.maxAttempts) !== null && _a !== void 0 ? 
_a : util_retry_1.DEFAULT_MAX_ATTEMPTS); + return { + ...input, + maxAttempts, + retryStrategy: async () => { + if (retryStrategy) { + return retryStrategy; + } + const retryMode = await (0, util_middleware_1.normalizeProvider)(input.retryMode)(); + if (retryMode === util_retry_1.RETRY_MODES.ADAPTIVE) { + return new util_retry_1.AdaptiveRetryStrategy(maxAttempts); + } + return new util_retry_1.StandardRetryStrategy(maxAttempts); + }, + }; +}; +exports.resolveRetryConfig = resolveRetryConfig; +exports.ENV_RETRY_MODE = "AWS_RETRY_MODE"; +exports.CONFIG_RETRY_MODE = "retry_mode"; +exports.NODE_RETRY_MODE_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => env[exports.ENV_RETRY_MODE], + configFileSelector: (profile) => profile[exports.CONFIG_RETRY_MODE], + default: util_retry_1.DEFAULT_RETRY_MODE, +}; + + +/***/ }), + +/***/ 9991: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getDefaultRetryQuota = void 0; +const util_retry_1 = __nccwpck_require__(4902); +const getDefaultRetryQuota = (initialRetryTokens, options) => { + var _a, _b, _c; + const MAX_CAPACITY = initialRetryTokens; + const noRetryIncrement = (_a = options === null || options === void 0 ? void 0 : options.noRetryIncrement) !== null && _a !== void 0 ? _a : util_retry_1.NO_RETRY_INCREMENT; + const retryCost = (_b = options === null || options === void 0 ? void 0 : options.retryCost) !== null && _b !== void 0 ? _b : util_retry_1.RETRY_COST; + const timeoutRetryCost = (_c = options === null || options === void 0 ? void 0 : options.timeoutRetryCost) !== null && _c !== void 0 ? _c : util_retry_1.TIMEOUT_RETRY_COST; + let availableCapacity = initialRetryTokens; + const getCapacityAmount = (error) => (error.name === "TimeoutError" ? timeoutRetryCost : retryCost); + const hasRetryTokens = (error) => getCapacityAmount(error) <= availableCapacity; + const retrieveRetryTokens = (error) => { + if (!hasRetryTokens(error)) { + throw new Error("No retry token available"); + } + const capacityAmount = getCapacityAmount(error); + availableCapacity -= capacityAmount; + return capacityAmount; + }; + const releaseRetryTokens = (capacityReleaseAmount) => { + availableCapacity += capacityReleaseAmount !== null && capacityReleaseAmount !== void 0 ? 
capacityReleaseAmount : noRetryIncrement; + availableCapacity = Math.min(availableCapacity, MAX_CAPACITY); + }; + return Object.freeze({ + hasRetryTokens, + retrieveRetryTokens, + releaseRetryTokens, + }); +}; +exports.getDefaultRetryQuota = getDefaultRetryQuota; + + +/***/ }), + +/***/ 9465: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.defaultDelayDecider = void 0; +const util_retry_1 = __nccwpck_require__(4902); +const defaultDelayDecider = (delayBase, attempts) => Math.floor(Math.min(util_retry_1.MAXIMUM_RETRY_DELAY, Math.random() * 2 ** attempts * delayBase)); +exports.defaultDelayDecider = defaultDelayDecider; + + +/***/ }), + +/***/ 6039: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(155), exports); +tslib_1.__exportStar(__nccwpck_require__(4582), exports); +tslib_1.__exportStar(__nccwpck_require__(8709), exports); +tslib_1.__exportStar(__nccwpck_require__(9465), exports); +tslib_1.__exportStar(__nccwpck_require__(6556), exports); +tslib_1.__exportStar(__nccwpck_require__(7653), exports); +tslib_1.__exportStar(__nccwpck_require__(1434), exports); + + +/***/ }), + +/***/ 6556: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getOmitRetryHeadersPlugin = exports.omitRetryHeadersMiddlewareOptions = exports.omitRetryHeadersMiddleware = void 0; +const protocol_http_1 = __nccwpck_require__(4418); +const util_retry_1 = __nccwpck_require__(4902); +const omitRetryHeadersMiddleware = () => (next) => async (args) => { + const { request } = args; + if (protocol_http_1.HttpRequest.isInstance(request)) { + delete request.headers[util_retry_1.INVOCATION_ID_HEADER]; + delete request.headers[util_retry_1.REQUEST_HEADER]; + } + return next(args); +}; +exports.omitRetryHeadersMiddleware = omitRetryHeadersMiddleware; +exports.omitRetryHeadersMiddlewareOptions = { + name: "omitRetryHeadersMiddleware", + tags: ["RETRY", "HEADERS", "OMIT_RETRY_HEADERS"], + relation: "before", + toMiddleware: "awsAuthMiddleware", + override: true, +}; +const getOmitRetryHeadersPlugin = (options) => ({ + applyToStack: (clientStack) => { + clientStack.addRelativeTo((0, exports.omitRetryHeadersMiddleware)(), exports.omitRetryHeadersMiddlewareOptions); + }, +}); +exports.getOmitRetryHeadersPlugin = getOmitRetryHeadersPlugin; + + +/***/ }), + +/***/ 7653: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.defaultRetryDecider = void 0; +const service_error_classification_1 = __nccwpck_require__(6375); +const defaultRetryDecider = (error) => { + if (!error) { + return false; + } + return (0, service_error_classification_1.isRetryableByTrait)(error) || (0, service_error_classification_1.isClockSkewError)(error) || (0, service_error_classification_1.isThrottlingError)(error) || (0, service_error_classification_1.isTransientError)(error); +}; +exports.defaultRetryDecider = defaultRetryDecider; + + +/***/ }), + +/***/ 1434: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getRetryAfterHint = 
exports.getRetryPlugin = exports.retryMiddlewareOptions = exports.retryMiddleware = void 0; +const protocol_http_1 = __nccwpck_require__(4418); +const service_error_classification_1 = __nccwpck_require__(6375); +const util_retry_1 = __nccwpck_require__(4902); +const uuid_1 = __nccwpck_require__(5840); +const util_1 = __nccwpck_require__(2827); +const retryMiddleware = (options) => (next, context) => async (args) => { + let retryStrategy = await options.retryStrategy(); + const maxAttempts = await options.maxAttempts(); + if (isRetryStrategyV2(retryStrategy)) { + retryStrategy = retryStrategy; + let retryToken = await retryStrategy.acquireInitialRetryToken(context["partition_id"]); + let lastError = new Error(); + let attempts = 0; + let totalRetryDelay = 0; + const { request } = args; + if (protocol_http_1.HttpRequest.isInstance(request)) { + request.headers[util_retry_1.INVOCATION_ID_HEADER] = (0, uuid_1.v4)(); + } + while (true) { + try { + if (protocol_http_1.HttpRequest.isInstance(request)) { + request.headers[util_retry_1.REQUEST_HEADER] = `attempt=${attempts + 1}; max=${maxAttempts}`; + } + const { response, output } = await next(args); + retryStrategy.recordSuccess(retryToken); + output.$metadata.attempts = attempts + 1; + output.$metadata.totalRetryDelay = totalRetryDelay; + return { response, output }; + } + catch (e) { + const retryErrorInfo = getRetryErrorInfo(e); + lastError = (0, util_1.asSdkError)(e); + try { + retryToken = await retryStrategy.refreshRetryTokenForRetry(retryToken, retryErrorInfo); + } + catch (refreshError) { + if (!lastError.$metadata) { + lastError.$metadata = {}; + } + lastError.$metadata.attempts = attempts + 1; + lastError.$metadata.totalRetryDelay = totalRetryDelay; + throw lastError; + } + attempts = retryToken.getRetryCount(); + const delay = retryToken.getRetryDelay(); + totalRetryDelay += delay; + await new Promise((resolve) => setTimeout(resolve, delay)); + } + } + } + else { + retryStrategy = retryStrategy; + if (retryStrategy === null || retryStrategy === void 0 ? 
void 0 : retryStrategy.mode) + context.userAgent = [...(context.userAgent || []), ["cfg/retry-mode", retryStrategy.mode]]; + return retryStrategy.retry(next, args); + } +}; +exports.retryMiddleware = retryMiddleware; +const isRetryStrategyV2 = (retryStrategy) => typeof retryStrategy.acquireInitialRetryToken !== "undefined" && + typeof retryStrategy.refreshRetryTokenForRetry !== "undefined" && + typeof retryStrategy.recordSuccess !== "undefined"; +const getRetryErrorInfo = (error) => { + const errorInfo = { + errorType: getRetryErrorType(error), + }; + const retryAfterHint = (0, exports.getRetryAfterHint)(error.$response); + if (retryAfterHint) { + errorInfo.retryAfterHint = retryAfterHint; + } + return errorInfo; +}; +const getRetryErrorType = (error) => { + if ((0, service_error_classification_1.isThrottlingError)(error)) + return "THROTTLING"; + if ((0, service_error_classification_1.isTransientError)(error)) + return "TRANSIENT"; + if ((0, service_error_classification_1.isServerError)(error)) + return "SERVER_ERROR"; + return "CLIENT_ERROR"; +}; +exports.retryMiddlewareOptions = { + name: "retryMiddleware", + tags: ["RETRY"], + step: "finalizeRequest", + priority: "high", + override: true, +}; +const getRetryPlugin = (options) => ({ + applyToStack: (clientStack) => { + clientStack.add((0, exports.retryMiddleware)(options), exports.retryMiddlewareOptions); + }, +}); +exports.getRetryPlugin = getRetryPlugin; +const getRetryAfterHint = (response) => { + if (!protocol_http_1.HttpResponse.isInstance(response)) + return; + const retryAfterHeaderName = Object.keys(response.headers).find((key) => key.toLowerCase() === "retry-after"); + if (!retryAfterHeaderName) + return; + const retryAfter = response.headers[retryAfterHeaderName]; + const retryAfterSeconds = Number(retryAfter); + if (!Number.isNaN(retryAfterSeconds)) + return new Date(retryAfterSeconds * 1000); + const retryAfterDate = new Date(retryAfter); + return retryAfterDate; +}; +exports.getRetryAfterHint = getRetryAfterHint; + + +/***/ }), + +/***/ 2827: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.asSdkError = void 0; +const asSdkError = (error) => { + if (error instanceof Error) + return error; + if (error instanceof Object) + return Object.assign(new Error(), error); + if (typeof error === "string") + return new Error(error); + return new Error(`AWS SDK error wrapper for ${error}`); +}; +exports.asSdkError = asSdkError; + + +/***/ }), + +/***/ 9922: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.deserializerMiddleware = void 0; +const deserializerMiddleware = (options, deserializer) => (next, context) => async (args) => { + const { response } = await next(args); + try { + const parsed = await deserializer(response, options); + return { + response, + output: parsed, + }; + } + catch (error) { + Object.defineProperty(error, "$response", { + value: response, + }); + if (!("$metadata" in error)) { + const hint = `Deserialization error: to see the raw response, inspect the hidden field {error}.$response on this object.`; + error.message += "\n " + hint; + } + throw error; + } +}; +exports.deserializerMiddleware = deserializerMiddleware; + + +/***/ }), + +/***/ 1238: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = 
__nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(9922), exports); +tslib_1.__exportStar(__nccwpck_require__(6485), exports); +tslib_1.__exportStar(__nccwpck_require__(3566), exports); + + +/***/ }), + +/***/ 6485: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getSerdePlugin = exports.serializerMiddlewareOption = exports.deserializerMiddlewareOption = void 0; +const deserializerMiddleware_1 = __nccwpck_require__(9922); +const serializerMiddleware_1 = __nccwpck_require__(3566); +exports.deserializerMiddlewareOption = { + name: "deserializerMiddleware", + step: "deserialize", + tags: ["DESERIALIZER"], + override: true, +}; +exports.serializerMiddlewareOption = { + name: "serializerMiddleware", + step: "serialize", + tags: ["SERIALIZER"], + override: true, +}; +function getSerdePlugin(config, serializer, deserializer) { + return { + applyToStack: (commandStack) => { + commandStack.add((0, deserializerMiddleware_1.deserializerMiddleware)(config, deserializer), exports.deserializerMiddlewareOption); + commandStack.add((0, serializerMiddleware_1.serializerMiddleware)(config, serializer), exports.serializerMiddlewareOption); + }, + }; +} +exports.getSerdePlugin = getSerdePlugin; + + +/***/ }), + +/***/ 3566: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.serializerMiddleware = void 0; +const serializerMiddleware = (options, serializer) => (next, context) => async (args) => { + var _a; + const endpoint = ((_a = context.endpointV2) === null || _a === void 0 ? void 0 : _a.url) && options.urlParser + ? async () => options.urlParser(context.endpointV2.url) + : options.endpoint; + if (!endpoint) { + throw new Error("No valid endpoint provider available."); + } + const request = await serializer(args.input, { ...options, endpoint }); + return next({ + ...args, + request, + }); +}; +exports.serializerMiddleware = serializerMiddleware; + + +/***/ }), + +/***/ 2404: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.constructStack = void 0; +const constructStack = () => { + let absoluteEntries = []; + let relativeEntries = []; + const entriesNameSet = new Set(); + const sort = (entries) => entries.sort((a, b) => stepWeights[b.step] - stepWeights[a.step] || + priorityWeights[b.priority || "normal"] - priorityWeights[a.priority || "normal"]); + const removeByName = (toRemove) => { + let isRemoved = false; + const filterCb = (entry) => { + if (entry.name && entry.name === toRemove) { + isRemoved = true; + entriesNameSet.delete(toRemove); + return false; + } + return true; + }; + absoluteEntries = absoluteEntries.filter(filterCb); + relativeEntries = relativeEntries.filter(filterCb); + return isRemoved; + }; + const removeByReference = (toRemove) => { + let isRemoved = false; + const filterCb = (entry) => { + if (entry.middleware === toRemove) { + isRemoved = true; + if (entry.name) + entriesNameSet.delete(entry.name); + return false; + } + return true; + }; + absoluteEntries = absoluteEntries.filter(filterCb); + relativeEntries = relativeEntries.filter(filterCb); + return isRemoved; + }; + const cloneTo = (toStack) => { + absoluteEntries.forEach((entry) => { + toStack.add(entry.middleware, { ...entry }); + }); + relativeEntries.forEach((entry) => { + toStack.addRelativeTo(entry.middleware, { 
...entry }); + }); + return toStack; + }; + const expandRelativeMiddlewareList = (from) => { + const expandedMiddlewareList = []; + from.before.forEach((entry) => { + if (entry.before.length === 0 && entry.after.length === 0) { + expandedMiddlewareList.push(entry); + } + else { + expandedMiddlewareList.push(...expandRelativeMiddlewareList(entry)); + } + }); + expandedMiddlewareList.push(from); + from.after.reverse().forEach((entry) => { + if (entry.before.length === 0 && entry.after.length === 0) { + expandedMiddlewareList.push(entry); + } + else { + expandedMiddlewareList.push(...expandRelativeMiddlewareList(entry)); + } + }); + return expandedMiddlewareList; + }; + const getMiddlewareList = (debug = false) => { + const normalizedAbsoluteEntries = []; + const normalizedRelativeEntries = []; + const normalizedEntriesNameMap = {}; + absoluteEntries.forEach((entry) => { + const normalizedEntry = { + ...entry, + before: [], + after: [], + }; + if (normalizedEntry.name) + normalizedEntriesNameMap[normalizedEntry.name] = normalizedEntry; + normalizedAbsoluteEntries.push(normalizedEntry); + }); + relativeEntries.forEach((entry) => { + const normalizedEntry = { + ...entry, + before: [], + after: [], + }; + if (normalizedEntry.name) + normalizedEntriesNameMap[normalizedEntry.name] = normalizedEntry; + normalizedRelativeEntries.push(normalizedEntry); + }); + normalizedRelativeEntries.forEach((entry) => { + if (entry.toMiddleware) { + const toMiddleware = normalizedEntriesNameMap[entry.toMiddleware]; + if (toMiddleware === undefined) { + if (debug) { + return; + } + throw new Error(`${entry.toMiddleware} is not found when adding ${entry.name || "anonymous"} middleware ${entry.relation} ${entry.toMiddleware}`); + } + if (entry.relation === "after") { + toMiddleware.after.push(entry); + } + if (entry.relation === "before") { + toMiddleware.before.push(entry); + } + } + }); + const mainChain = sort(normalizedAbsoluteEntries) + .map(expandRelativeMiddlewareList) + .reduce((wholeList, expandedMiddlewareList) => { + wholeList.push(...expandedMiddlewareList); + return wholeList; + }, []); + return mainChain; + }; + const stack = { + add: (middleware, options = {}) => { + const { name, override } = options; + const entry = { + step: "initialize", + priority: "normal", + middleware, + ...options, + }; + if (name) { + if (entriesNameSet.has(name)) { + if (!override) + throw new Error(`Duplicate middleware name '${name}'`); + const toOverrideIndex = absoluteEntries.findIndex((entry) => entry.name === name); + const toOverride = absoluteEntries[toOverrideIndex]; + if (toOverride.step !== entry.step || toOverride.priority !== entry.priority) { + throw new Error(`"${name}" middleware with ${toOverride.priority} priority in ${toOverride.step} step cannot be ` + + `overridden by same-name middleware with ${entry.priority} priority in ${entry.step} step.`); + } + absoluteEntries.splice(toOverrideIndex, 1); + } + entriesNameSet.add(name); + } + absoluteEntries.push(entry); + }, + addRelativeTo: (middleware, options) => { + const { name, override } = options; + const entry = { + middleware, + ...options, + }; + if (name) { + if (entriesNameSet.has(name)) { + if (!override) + throw new Error(`Duplicate middleware name '${name}'`); + const toOverrideIndex = relativeEntries.findIndex((entry) => entry.name === name); + const toOverride = relativeEntries[toOverrideIndex]; + if (toOverride.toMiddleware !== entry.toMiddleware || toOverride.relation !== entry.relation) { + throw new Error(`"${name}" middleware 
${toOverride.relation} "${toOverride.toMiddleware}" middleware cannot be overridden ` + + `by same-name middleware ${entry.relation} "${entry.toMiddleware}" middleware.`); + } + relativeEntries.splice(toOverrideIndex, 1); + } + entriesNameSet.add(name); + } + relativeEntries.push(entry); + }, + clone: () => cloneTo((0, exports.constructStack)()), + use: (plugin) => { + plugin.applyToStack(stack); + }, + remove: (toRemove) => { + if (typeof toRemove === "string") + return removeByName(toRemove); + else + return removeByReference(toRemove); + }, + removeByTag: (toRemove) => { + let isRemoved = false; + const filterCb = (entry) => { + const { tags, name } = entry; + if (tags && tags.includes(toRemove)) { + if (name) + entriesNameSet.delete(name); + isRemoved = true; + return false; + } + return true; + }; + absoluteEntries = absoluteEntries.filter(filterCb); + relativeEntries = relativeEntries.filter(filterCb); + return isRemoved; + }, + concat: (from) => { + const cloned = cloneTo((0, exports.constructStack)()); + cloned.use(from); + return cloned; + }, + applyToStack: cloneTo, + identify: () => { + return getMiddlewareList(true).map((mw) => { + return mw.name + ": " + (mw.tags || []).join(","); + }); + }, + resolve: (handler, context) => { + for (const middleware of getMiddlewareList() + .map((entry) => entry.middleware) + .reverse()) { + handler = middleware(handler, context); + } + return handler; + }, + }; + return stack; +}; +exports.constructStack = constructStack; +const stepWeights = { + initialize: 5, + serialize: 4, + build: 3, + finalizeRequest: 2, + deserialize: 1, +}; +const priorityWeights = { + high: 3, + normal: 2, + low: 1, +}; + + +/***/ }), + +/***/ 7911: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(2404), exports); + + +/***/ }), + +/***/ 4766: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.loadConfig = void 0; +const property_provider_1 = __nccwpck_require__(9721); +const fromEnv_1 = __nccwpck_require__(5606); +const fromSharedConfigFiles_1 = __nccwpck_require__(5784); +const fromStatic_1 = __nccwpck_require__(3091); +const loadConfig = ({ environmentVariableSelector, configFileSelector, default: defaultValue }, configuration = {}) => (0, property_provider_1.memoize)((0, property_provider_1.chain)((0, fromEnv_1.fromEnv)(environmentVariableSelector), (0, fromSharedConfigFiles_1.fromSharedConfigFiles)(configFileSelector, configuration), (0, fromStatic_1.fromStatic)(defaultValue))); +exports.loadConfig = loadConfig; + + +/***/ }), + +/***/ 5606: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.fromEnv = void 0; +const property_provider_1 = __nccwpck_require__(9721); +const fromEnv = (envVarSelector) => async () => { + try { + const config = envVarSelector(process.env); + if (config === undefined) { + throw new Error(); + } + return config; + } + catch (e) { + throw new property_provider_1.CredentialsProviderError(e.message || `Cannot load config from environment variables with getter: ${envVarSelector}`); + } +}; +exports.fromEnv = fromEnv; + + +/***/ }), + +/***/ 5784: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + 
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.fromSharedConfigFiles = void 0; +const property_provider_1 = __nccwpck_require__(9721); +const shared_ini_file_loader_1 = __nccwpck_require__(3507); +const fromSharedConfigFiles = (configSelector, { preferredFile = "config", ...init } = {}) => async () => { + const profile = (0, shared_ini_file_loader_1.getProfileName)(init); + const { configFile, credentialsFile } = await (0, shared_ini_file_loader_1.loadSharedConfigFiles)(init); + const profileFromCredentials = credentialsFile[profile] || {}; + const profileFromConfig = configFile[profile] || {}; + const mergedProfile = preferredFile === "config" + ? { ...profileFromCredentials, ...profileFromConfig } + : { ...profileFromConfig, ...profileFromCredentials }; + try { + const configValue = configSelector(mergedProfile); + if (configValue === undefined) { + throw new Error(); + } + return configValue; + } + catch (e) { + throw new property_provider_1.CredentialsProviderError(e.message || `Cannot load config for profile ${profile} in SDK configuration files with getter: ${configSelector}`); + } +}; +exports.fromSharedConfigFiles = fromSharedConfigFiles; + + +/***/ }), + +/***/ 3091: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.fromStatic = void 0; +const property_provider_1 = __nccwpck_require__(9721); +const isFunction = (func) => typeof func === "function"; +const fromStatic = (defaultValue) => isFunction(defaultValue) ? async () => await defaultValue() : (0, property_provider_1.fromStatic)(defaultValue); +exports.fromStatic = fromStatic; + + +/***/ }), + +/***/ 3461: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(4766), exports); + + +/***/ }), + +/***/ 3946: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.NODEJS_TIMEOUT_ERROR_CODES = void 0; +exports.NODEJS_TIMEOUT_ERROR_CODES = ["ECONNRESET", "EPIPE", "ETIMEDOUT"]; + + +/***/ }), + +/***/ 508: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getTransformedHeaders = void 0; +const getTransformedHeaders = (headers) => { + const transformedHeaders = {}; + for (const name of Object.keys(headers)) { + const headerValues = headers[name]; + transformedHeaders[name] = Array.isArray(headerValues) ? 
headerValues.join(",") : headerValues; + } + return transformedHeaders; +}; +exports.getTransformedHeaders = getTransformedHeaders; + + +/***/ }), + +/***/ 258: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(6948), exports); +tslib_1.__exportStar(__nccwpck_require__(6999), exports); +tslib_1.__exportStar(__nccwpck_require__(1030), exports); + + +/***/ }), + +/***/ 6948: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.NodeHttpHandler = exports.DEFAULT_REQUEST_TIMEOUT = void 0; +const protocol_http_1 = __nccwpck_require__(4418); +const querystring_builder_1 = __nccwpck_require__(8031); +const http_1 = __nccwpck_require__(3685); +const https_1 = __nccwpck_require__(5687); +const constants_1 = __nccwpck_require__(3946); +const get_transformed_headers_1 = __nccwpck_require__(508); +const set_connection_timeout_1 = __nccwpck_require__(5545); +const set_socket_keep_alive_1 = __nccwpck_require__(3751); +const set_socket_timeout_1 = __nccwpck_require__(2618); +const write_request_body_1 = __nccwpck_require__(3766); +exports.DEFAULT_REQUEST_TIMEOUT = 0; +class NodeHttpHandler { + constructor(options) { + this.metadata = { handlerProtocol: "http/1.1" }; + this.configProvider = new Promise((resolve, reject) => { + if (typeof options === "function") { + options() + .then((_options) => { + resolve(this.resolveDefaultConfig(_options)); + }) + .catch(reject); + } + else { + resolve(this.resolveDefaultConfig(options)); + } + }); + } + resolveDefaultConfig(options) { + const { requestTimeout, connectionTimeout, socketTimeout, httpAgent, httpsAgent } = options || {}; + const keepAlive = true; + const maxSockets = 50; + return { + connectionTimeout, + requestTimeout: requestTimeout !== null && requestTimeout !== void 0 ? requestTimeout : socketTimeout, + httpAgent: httpAgent || new http_1.Agent({ keepAlive, maxSockets }), + httpsAgent: httpsAgent || new https_1.Agent({ keepAlive, maxSockets }), + }; + } + destroy() { + var _a, _b, _c, _d; + (_b = (_a = this.config) === null || _a === void 0 ? void 0 : _a.httpAgent) === null || _b === void 0 ? void 0 : _b.destroy(); + (_d = (_c = this.config) === null || _c === void 0 ? void 0 : _c.httpsAgent) === null || _d === void 0 ? void 0 : _d.destroy(); + } + async handle(request, { abortSignal } = {}) { + if (!this.config) { + this.config = await this.configProvider; + } + return new Promise((_resolve, _reject) => { + var _a, _b; + let writeRequestBodyPromise = undefined; + const resolve = async (arg) => { + await writeRequestBodyPromise; + _resolve(arg); + }; + const reject = async (arg) => { + await writeRequestBodyPromise; + _reject(arg); + }; + if (!this.config) { + throw new Error("Node HTTP request handler config is not resolved"); + } + if (abortSignal === null || abortSignal === void 0 ? void 0 : abortSignal.aborted) { + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + reject(abortError); + return; + } + const isSSL = request.protocol === "https:"; + const queryString = (0, querystring_builder_1.buildQueryString)(request.query || {}); + let auth = undefined; + if (request.username != null || request.password != null) { + const username = (_a = request.username) !== null && _a !== void 0 ? 
_a : ""; + const password = (_b = request.password) !== null && _b !== void 0 ? _b : ""; + auth = `${username}:${password}`; + } + let path = request.path; + if (queryString) { + path += `?${queryString}`; + } + if (request.fragment) { + path += `#${request.fragment}`; + } + const nodeHttpsOptions = { + headers: request.headers, + host: request.hostname, + method: request.method, + path, + port: request.port, + agent: isSSL ? this.config.httpsAgent : this.config.httpAgent, + auth, + }; + const requestFunc = isSSL ? https_1.request : http_1.request; + const req = requestFunc(nodeHttpsOptions, (res) => { + const httpResponse = new protocol_http_1.HttpResponse({ + statusCode: res.statusCode || -1, + reason: res.statusMessage, + headers: (0, get_transformed_headers_1.getTransformedHeaders)(res.headers), + body: res, + }); + resolve({ response: httpResponse }); + }); + req.on("error", (err) => { + if (constants_1.NODEJS_TIMEOUT_ERROR_CODES.includes(err.code)) { + reject(Object.assign(err, { name: "TimeoutError" })); + } + else { + reject(err); + } + }); + (0, set_connection_timeout_1.setConnectionTimeout)(req, reject, this.config.connectionTimeout); + (0, set_socket_timeout_1.setSocketTimeout)(req, reject, this.config.requestTimeout); + if (abortSignal) { + abortSignal.onabort = () => { + req.abort(); + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + reject(abortError); + }; + } + const httpAgent = nodeHttpsOptions.agent; + if (typeof httpAgent === "object" && "keepAlive" in httpAgent) { + (0, set_socket_keep_alive_1.setSocketKeepAlive)(req, { + keepAlive: httpAgent.keepAlive, + keepAliveMsecs: httpAgent.keepAliveMsecs, + }); + } + writeRequestBodyPromise = (0, write_request_body_1.writeRequestBody)(req, request, this.config.requestTimeout).catch(_reject); + }); + } +} +exports.NodeHttpHandler = NodeHttpHandler; + + +/***/ }), + +/***/ 5771: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.NodeHttp2ConnectionManager = void 0; +const tslib_1 = __nccwpck_require__(4351); +const http2_1 = tslib_1.__importDefault(__nccwpck_require__(5158)); +const node_http2_connection_pool_1 = __nccwpck_require__(5157); +class NodeHttp2ConnectionManager { + constructor(config) { + this.sessionCache = new Map(); + this.config = config; + if (this.config.maxConcurrency && this.config.maxConcurrency <= 0) { + throw new RangeError("maxConcurrency must be greater than zero."); + } + } + lease(requestContext, connectionConfiguration) { + const url = this.getUrlString(requestContext); + const existingPool = this.sessionCache.get(url); + if (existingPool) { + const existingSession = existingPool.poll(); + if (existingSession && !this.config.disableConcurrency) { + return existingSession; + } + } + const session = http2_1.default.connect(url); + if (this.config.maxConcurrency) { + session.settings({ maxConcurrentStreams: this.config.maxConcurrency }, (err) => { + if (err) { + throw new Error("Fail to set maxConcurrentStreams to " + + this.config.maxConcurrency + + "when creating new session for " + + requestContext.destination.toString()); + } + }); + } + session.unref(); + const destroySessionCb = () => { + session.destroy(); + this.deleteSession(url, session); + }; + session.on("goaway", destroySessionCb); + session.on("error", destroySessionCb); + session.on("frameError", destroySessionCb); + session.on("close", () => this.deleteSession(url, session)); + if 
(connectionConfiguration.requestTimeout) { + session.setTimeout(connectionConfiguration.requestTimeout, destroySessionCb); + } + const connectionPool = this.sessionCache.get(url) || new node_http2_connection_pool_1.NodeHttp2ConnectionPool(); + connectionPool.offerLast(session); + this.sessionCache.set(url, connectionPool); + return session; + } + deleteSession(authority, session) { + const existingConnectionPool = this.sessionCache.get(authority); + if (!existingConnectionPool) { + return; + } + if (!existingConnectionPool.contains(session)) { + return; + } + existingConnectionPool.remove(session); + this.sessionCache.set(authority, existingConnectionPool); + } + release(requestContext, session) { + var _a; + const cacheKey = this.getUrlString(requestContext); + (_a = this.sessionCache.get(cacheKey)) === null || _a === void 0 ? void 0 : _a.offerLast(session); + } + destroy() { + for (const [key, connectionPool] of this.sessionCache) { + for (const session of connectionPool) { + if (!session.destroyed) { + session.destroy(); + } + connectionPool.remove(session); + } + this.sessionCache.delete(key); + } + } + setMaxConcurrentStreams(maxConcurrentStreams) { + if (this.config.maxConcurrency && this.config.maxConcurrency <= 0) { + throw new RangeError("maxConcurrentStreams must be greater than zero."); + } + this.config.maxConcurrency = maxConcurrentStreams; + } + setDisableConcurrentStreams(disableConcurrentStreams) { + this.config.disableConcurrency = disableConcurrentStreams; + } + getUrlString(request) { + return request.destination.toString(); + } +} +exports.NodeHttp2ConnectionManager = NodeHttp2ConnectionManager; + + +/***/ }), + +/***/ 5157: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.NodeHttp2ConnectionPool = void 0; +class NodeHttp2ConnectionPool { + constructor(sessions) { + this.sessions = []; + this.sessions = sessions !== null && sessions !== void 0 ? 
sessions : []; + } + poll() { + if (this.sessions.length > 0) { + return this.sessions.shift(); + } + } + offerLast(session) { + this.sessions.push(session); + } + contains(session) { + return this.sessions.includes(session); + } + remove(session) { + this.sessions = this.sessions.filter((s) => s !== session); + } + [Symbol.iterator]() { + return this.sessions[Symbol.iterator](); + } + destroy(connection) { + for (const session of this.sessions) { + if (session === connection) { + if (!session.destroyed) { + session.destroy(); + } + } + } + } +} +exports.NodeHttp2ConnectionPool = NodeHttp2ConnectionPool; + + +/***/ }), + +/***/ 6999: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.NodeHttp2Handler = void 0; +const protocol_http_1 = __nccwpck_require__(4418); +const querystring_builder_1 = __nccwpck_require__(8031); +const http2_1 = __nccwpck_require__(5158); +const get_transformed_headers_1 = __nccwpck_require__(508); +const node_http2_connection_manager_1 = __nccwpck_require__(5771); +const write_request_body_1 = __nccwpck_require__(3766); +class NodeHttp2Handler { + constructor(options) { + this.metadata = { handlerProtocol: "h2" }; + this.connectionManager = new node_http2_connection_manager_1.NodeHttp2ConnectionManager({}); + this.configProvider = new Promise((resolve, reject) => { + if (typeof options === "function") { + options() + .then((opts) => { + resolve(opts || {}); + }) + .catch(reject); + } + else { + resolve(options || {}); + } + }); + } + destroy() { + this.connectionManager.destroy(); + } + async handle(request, { abortSignal } = {}) { + if (!this.config) { + this.config = await this.configProvider; + this.connectionManager.setDisableConcurrentStreams(this.config.disableConcurrentStreams || false); + if (this.config.maxConcurrentStreams) { + this.connectionManager.setMaxConcurrentStreams(this.config.maxConcurrentStreams); + } + } + const { requestTimeout, disableConcurrentStreams } = this.config; + return new Promise((_resolve, _reject) => { + var _a, _b, _c; + let fulfilled = false; + let writeRequestBodyPromise = undefined; + const resolve = async (arg) => { + await writeRequestBodyPromise; + _resolve(arg); + }; + const reject = async (arg) => { + await writeRequestBodyPromise; + _reject(arg); + }; + if (abortSignal === null || abortSignal === void 0 ? void 0 : abortSignal.aborted) { + fulfilled = true; + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + reject(abortError); + return; + } + const { hostname, method, port, protocol, query } = request; + let auth = ""; + if (request.username != null || request.password != null) { + const username = (_a = request.username) !== null && _a !== void 0 ? _a : ""; + const password = (_b = request.password) !== null && _b !== void 0 ? _b : ""; + auth = `${username}:${password}@`; + } + const authority = `${protocol}//${auth}${hostname}${port ? `:${port}` : ""}`; + const requestContext = { destination: new URL(authority) }; + const session = this.connectionManager.lease(requestContext, { + requestTimeout: (_c = this.config) === null || _c === void 0 ? 
void 0 : _c.sessionTimeout, + disableConcurrentStreams: disableConcurrentStreams || false, + }); + const rejectWithDestroy = (err) => { + if (disableConcurrentStreams) { + this.destroySession(session); + } + fulfilled = true; + reject(err); + }; + const queryString = (0, querystring_builder_1.buildQueryString)(query || {}); + let path = request.path; + if (queryString) { + path += `?${queryString}`; + } + if (request.fragment) { + path += `#${request.fragment}`; + } + const req = session.request({ + ...request.headers, + [http2_1.constants.HTTP2_HEADER_PATH]: path, + [http2_1.constants.HTTP2_HEADER_METHOD]: method, + }); + session.ref(); + req.on("response", (headers) => { + const httpResponse = new protocol_http_1.HttpResponse({ + statusCode: headers[":status"] || -1, + headers: (0, get_transformed_headers_1.getTransformedHeaders)(headers), + body: req, + }); + fulfilled = true; + resolve({ response: httpResponse }); + if (disableConcurrentStreams) { + session.close(); + this.connectionManager.deleteSession(authority, session); + } + }); + if (requestTimeout) { + req.setTimeout(requestTimeout, () => { + req.close(); + const timeoutError = new Error(`Stream timed out because of no activity for ${requestTimeout} ms`); + timeoutError.name = "TimeoutError"; + rejectWithDestroy(timeoutError); + }); + } + if (abortSignal) { + abortSignal.onabort = () => { + req.close(); + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + rejectWithDestroy(abortError); + }; + } + req.on("frameError", (type, code, id) => { + rejectWithDestroy(new Error(`Frame type id ${type} in stream id ${id} has failed with code ${code}.`)); + }); + req.on("error", rejectWithDestroy); + req.on("aborted", () => { + rejectWithDestroy(new Error(`HTTP/2 stream is abnormally aborted in mid-communication with result code ${req.rstCode}.`)); + }); + req.on("close", () => { + session.unref(); + if (disableConcurrentStreams) { + session.destroy(); + } + if (!fulfilled) { + rejectWithDestroy(new Error("Unexpected error: http2 request did not get a response")); + } + }); + writeRequestBodyPromise = (0, write_request_body_1.writeRequestBody)(req, request, requestTimeout); + }); + } + destroySession(session) { + if (!session.destroyed) { + session.destroy(); + } + } +} +exports.NodeHttp2Handler = NodeHttp2Handler; + + +/***/ }), + +/***/ 5545: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.setConnectionTimeout = void 0; +const setConnectionTimeout = (request, reject, timeoutInMs = 0) => { + if (!timeoutInMs) { + return; + } + const timeoutId = setTimeout(() => { + request.destroy(); + reject(Object.assign(new Error(`Socket timed out without establishing a connection within ${timeoutInMs} ms`), { + name: "TimeoutError", + })); + }, timeoutInMs); + request.on("socket", (socket) => { + if (socket.connecting) { + socket.on("connect", () => { + clearTimeout(timeoutId); + }); + } + else { + clearTimeout(timeoutId); + } + }); +}; +exports.setConnectionTimeout = setConnectionTimeout; + + +/***/ }), + +/***/ 3751: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.setSocketKeepAlive = void 0; +const setSocketKeepAlive = (request, { keepAlive, keepAliveMsecs }) => { + if (keepAlive !== true) { + return; + } + request.on("socket", (socket) => { + socket.setKeepAlive(keepAlive, keepAliveMsecs || 0); + }); +}; 
+exports.setSocketKeepAlive = setSocketKeepAlive; + + +/***/ }), + +/***/ 2618: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.setSocketTimeout = void 0; +const setSocketTimeout = (request, reject, timeoutInMs = 0) => { + request.setTimeout(timeoutInMs, () => { + request.destroy(); + reject(Object.assign(new Error(`Connection timed out after ${timeoutInMs} ms`), { name: "TimeoutError" })); + }); +}; +exports.setSocketTimeout = setSocketTimeout; + + +/***/ }), + +/***/ 3211: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Collector = void 0; +const stream_1 = __nccwpck_require__(2781); +class Collector extends stream_1.Writable { + constructor() { + super(...arguments); + this.bufferedBytes = []; + } + _write(chunk, encoding, callback) { + this.bufferedBytes.push(chunk); + callback(); + } +} +exports.Collector = Collector; + + +/***/ }), + +/***/ 1030: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.streamCollector = void 0; +const collector_1 = __nccwpck_require__(3211); +const streamCollector = (stream) => new Promise((resolve, reject) => { + const collector = new collector_1.Collector(); + stream.pipe(collector); + stream.on("error", (err) => { + collector.end(); + reject(err); + }); + collector.on("error", reject); + collector.on("finish", function () { + const bytes = new Uint8Array(Buffer.concat(this.bufferedBytes)); + resolve(bytes); + }); +}); +exports.streamCollector = streamCollector; + + +/***/ }), + +/***/ 3766: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.writeRequestBody = void 0; +const stream_1 = __nccwpck_require__(2781); +const MIN_WAIT_TIME = 1000; +async function writeRequestBody(httpRequest, request, maxContinueTimeoutMs = MIN_WAIT_TIME) { + var _a; + const headers = (_a = request.headers) !== null && _a !== void 0 ? 
_a : {}; + const expect = headers["Expect"] || headers["expect"]; + let timeoutId = -1; + let hasError = false; + if (expect === "100-continue") { + await Promise.race([ + new Promise((resolve) => { + timeoutId = Number(setTimeout(resolve, Math.max(MIN_WAIT_TIME, maxContinueTimeoutMs))); + }), + new Promise((resolve) => { + httpRequest.on("continue", () => { + clearTimeout(timeoutId); + resolve(); + }); + httpRequest.on("error", () => { + hasError = true; + clearTimeout(timeoutId); + resolve(); + }); + }), + ]); + } + if (!hasError) { + writeBody(httpRequest, request.body); + } +} +exports.writeRequestBody = writeRequestBody; +function writeBody(httpRequest, body) { + if (body instanceof stream_1.Readable) { + body.pipe(httpRequest); + } + else if (body) { + httpRequest.end(Buffer.from(body)); + } + else { + httpRequest.end(); + } +} + + +/***/ }), + +/***/ 3936: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.CredentialsProviderError = void 0; +const ProviderError_1 = __nccwpck_require__(3324); +class CredentialsProviderError extends ProviderError_1.ProviderError { + constructor(message, tryNextLink = true) { + super(message, tryNextLink); + this.tryNextLink = tryNextLink; + this.name = "CredentialsProviderError"; + Object.setPrototypeOf(this, CredentialsProviderError.prototype); + } +} +exports.CredentialsProviderError = CredentialsProviderError; + + +/***/ }), + +/***/ 3324: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ProviderError = void 0; +class ProviderError extends Error { + constructor(message, tryNextLink = true) { + super(message); + this.tryNextLink = tryNextLink; + this.name = "ProviderError"; + Object.setPrototypeOf(this, ProviderError.prototype); + } + static from(error, tryNextLink = true) { + return Object.assign(new this(error.message, tryNextLink), error); + } +} +exports.ProviderError = ProviderError; + + +/***/ }), + +/***/ 429: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.TokenProviderError = void 0; +const ProviderError_1 = __nccwpck_require__(3324); +class TokenProviderError extends ProviderError_1.ProviderError { + constructor(message, tryNextLink = true) { + super(message, tryNextLink); + this.tryNextLink = tryNextLink; + this.name = "TokenProviderError"; + Object.setPrototypeOf(this, TokenProviderError.prototype); + } +} +exports.TokenProviderError = TokenProviderError; + + +/***/ }), + +/***/ 5079: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.chain = void 0; +const ProviderError_1 = __nccwpck_require__(3324); +function chain(...providers) { + return () => { + let promise = Promise.reject(new ProviderError_1.ProviderError("No providers in chain")); + for (const provider of providers) { + promise = promise.catch((err) => { + if (err === null || err === void 0 ? 
void 0 : err.tryNextLink) { + return provider(); + } + throw err; + }); + } + return promise; + }; +} +exports.chain = chain; + + +/***/ }), + +/***/ 1322: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.fromStatic = void 0; +const fromStatic = (staticValue) => () => Promise.resolve(staticValue); +exports.fromStatic = fromStatic; + + +/***/ }), + +/***/ 9721: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(3936), exports); +tslib_1.__exportStar(__nccwpck_require__(3324), exports); +tslib_1.__exportStar(__nccwpck_require__(429), exports); +tslib_1.__exportStar(__nccwpck_require__(5079), exports); +tslib_1.__exportStar(__nccwpck_require__(1322), exports); +tslib_1.__exportStar(__nccwpck_require__(9762), exports); + + +/***/ }), + +/***/ 9762: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.memoize = void 0; +const memoize = (provider, isExpired, requiresRefresh) => { + let resolved; + let pending; + let hasResult; + let isConstant = false; + const coalesceProvider = async () => { + if (!pending) { + pending = provider(); + } + try { + resolved = await pending; + hasResult = true; + isConstant = false; + } + finally { + pending = undefined; + } + return resolved; + }; + if (isExpired === undefined) { + return async (options) => { + if (!hasResult || (options === null || options === void 0 ? void 0 : options.forceRefresh)) { + resolved = await coalesceProvider(); + } + return resolved; + }; + } + return async (options) => { + if (!hasResult || (options === null || options === void 0 ? void 0 : options.forceRefresh)) { + resolved = await coalesceProvider(); + } + if (isConstant) { + return resolved; + } + if (requiresRefresh && !requiresRefresh(resolved)) { + isConstant = true; + return resolved; + } + if (isExpired(resolved)) { + await coalesceProvider(); + return resolved; + } + return resolved; + }; +}; +exports.memoize = memoize; + + +/***/ }), + +/***/ 9179: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Field = void 0; +const types_1 = __nccwpck_require__(5756); +class Field { + constructor({ name, kind = types_1.FieldPosition.HEADER, values = [] }) { + this.name = name; + this.kind = kind; + this.values = values; + } + add(value) { + this.values.push(value); + } + set(values) { + this.values = values; + } + remove(value) { + this.values = this.values.filter((v) => v !== value); + } + toString() { + return this.values.map((v) => (v.includes(",") || v.includes(" ") ? 
`"${v}"` : v)).join(", "); + } + get() { + return this.values; + } +} +exports.Field = Field; + + +/***/ }), + +/***/ 9242: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Fields = void 0; +class Fields { + constructor({ fields = [], encoding = "utf-8" }) { + this.entries = {}; + fields.forEach(this.setField.bind(this)); + this.encoding = encoding; + } + setField(field) { + this.entries[field.name.toLowerCase()] = field; + } + getField(name) { + return this.entries[name.toLowerCase()]; + } + removeField(name) { + delete this.entries[name.toLowerCase()]; + } + getByType(kind) { + return Object.values(this.entries).filter((field) => field.kind === kind); + } +} +exports.Fields = Fields; + + +/***/ }), + +/***/ 3206: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 8746: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.HttpRequest = void 0; +class HttpRequest { + constructor(options) { + this.method = options.method || "GET"; + this.hostname = options.hostname || "localhost"; + this.port = options.port; + this.query = options.query || {}; + this.headers = options.headers || {}; + this.body = options.body; + this.protocol = options.protocol + ? options.protocol.slice(-1) !== ":" + ? `${options.protocol}:` + : options.protocol + : "https:"; + this.path = options.path ? (options.path.charAt(0) !== "/" ? `/${options.path}` : options.path) : "/"; + this.username = options.username; + this.password = options.password; + this.fragment = options.fragment; + } + static isInstance(request) { + if (!request) + return false; + const req = request; + return ("method" in req && + "protocol" in req && + "hostname" in req && + "path" in req && + typeof req["query"] === "object" && + typeof req["headers"] === "object"); + } + clone() { + const cloned = new HttpRequest({ + ...this, + headers: { ...this.headers }, + }); + if (cloned.query) + cloned.query = cloneQuery(cloned.query); + return cloned; + } +} +exports.HttpRequest = HttpRequest; +function cloneQuery(query) { + return Object.keys(query).reduce((carry, paramName) => { + const param = query[paramName]; + return { + ...carry, + [paramName]: Array.isArray(param) ? 
[...param] : param, + }; + }, {}); +} + + +/***/ }), + +/***/ 6322: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.HttpResponse = void 0; +class HttpResponse { + constructor(options) { + this.statusCode = options.statusCode; + this.reason = options.reason; + this.headers = options.headers || {}; + this.body = options.body; + } + static isInstance(response) { + if (!response) + return false; + const resp = response; + return typeof resp.statusCode === "number" && typeof resp.headers === "object"; + } +} +exports.HttpResponse = HttpResponse; + + +/***/ }), + +/***/ 4418: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(9179), exports); +tslib_1.__exportStar(__nccwpck_require__(9242), exports); +tslib_1.__exportStar(__nccwpck_require__(3206), exports); +tslib_1.__exportStar(__nccwpck_require__(8746), exports); +tslib_1.__exportStar(__nccwpck_require__(6322), exports); +tslib_1.__exportStar(__nccwpck_require__(1466), exports); +tslib_1.__exportStar(__nccwpck_require__(9135), exports); + + +/***/ }), + +/***/ 1466: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isValidHostname = void 0; +function isValidHostname(hostname) { + const hostPattern = /^[a-z0-9][a-z0-9\.\-]*[a-z0-9]$/; + return hostPattern.test(hostname); +} +exports.isValidHostname = isValidHostname; + + +/***/ }), + +/***/ 9135: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 8031: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.buildQueryString = void 0; +const util_uri_escape_1 = __nccwpck_require__(4197); +function buildQueryString(query) { + const parts = []; + for (let key of Object.keys(query).sort()) { + const value = query[key]; + key = (0, util_uri_escape_1.escapeUri)(key); + if (Array.isArray(value)) { + for (let i = 0, iLen = value.length; i < iLen; i++) { + parts.push(`${key}=${(0, util_uri_escape_1.escapeUri)(value[i])}`); + } + } + else { + let qsEntry = key; + if (value || typeof value === "string") { + qsEntry += `=${(0, util_uri_escape_1.escapeUri)(value)}`; + } + parts.push(qsEntry); + } + } + return parts.join("&"); +} +exports.buildQueryString = buildQueryString; + + +/***/ }), + +/***/ 4769: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.parseQueryString = void 0; +function parseQueryString(querystring) { + const query = {}; + querystring = querystring.replace(/^\?/, ""); + if (querystring) { + for (const pair of querystring.split("&")) { + let [key, value = null] = pair.split("="); + key = decodeURIComponent(key); + if (value) { + value = decodeURIComponent(value); + } + if (!(key in query)) { + query[key] = value; + } + else if (Array.isArray(query[key])) { + query[key].push(value); + } + else { + query[key] = [query[key], value]; + } + } + } + return query; +} +exports.parseQueryString = parseQueryString; + + +/***/ }), + +/***/ 8415: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + 
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.NODEJS_TIMEOUT_ERROR_CODES = exports.TRANSIENT_ERROR_STATUS_CODES = exports.TRANSIENT_ERROR_CODES = exports.THROTTLING_ERROR_CODES = exports.CLOCK_SKEW_ERROR_CODES = void 0; +exports.CLOCK_SKEW_ERROR_CODES = [ + "AuthFailure", + "InvalidSignatureException", + "RequestExpired", + "RequestInTheFuture", + "RequestTimeTooSkewed", + "SignatureDoesNotMatch", +]; +exports.THROTTLING_ERROR_CODES = [ + "BandwidthLimitExceeded", + "EC2ThrottledException", + "LimitExceededException", + "PriorRequestNotComplete", + "ProvisionedThroughputExceededException", + "RequestLimitExceeded", + "RequestThrottled", + "RequestThrottledException", + "SlowDown", + "ThrottledException", + "Throttling", + "ThrottlingException", + "TooManyRequestsException", + "TransactionInProgressException", +]; +exports.TRANSIENT_ERROR_CODES = ["TimeoutError", "RequestTimeout", "RequestTimeoutException"]; +exports.TRANSIENT_ERROR_STATUS_CODES = [500, 502, 503, 504]; +exports.NODEJS_TIMEOUT_ERROR_CODES = ["ECONNRESET", "ECONNREFUSED", "EPIPE", "ETIMEDOUT"]; + + +/***/ }), + +/***/ 6375: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isServerError = exports.isTransientError = exports.isThrottlingError = exports.isClockSkewError = exports.isRetryableByTrait = void 0; +const constants_1 = __nccwpck_require__(8415); +const isRetryableByTrait = (error) => error.$retryable !== undefined; +exports.isRetryableByTrait = isRetryableByTrait; +const isClockSkewError = (error) => constants_1.CLOCK_SKEW_ERROR_CODES.includes(error.name); +exports.isClockSkewError = isClockSkewError; +const isThrottlingError = (error) => { + var _a, _b; + return ((_a = error.$metadata) === null || _a === void 0 ? void 0 : _a.httpStatusCode) === 429 || + constants_1.THROTTLING_ERROR_CODES.includes(error.name) || + ((_b = error.$retryable) === null || _b === void 0 ? void 0 : _b.throttling) == true; +}; +exports.isThrottlingError = isThrottlingError; +const isTransientError = (error) => { + var _a; + return constants_1.TRANSIENT_ERROR_CODES.includes(error.name) || + constants_1.NODEJS_TIMEOUT_ERROR_CODES.includes((error === null || error === void 0 ? void 0 : error.code) || "") || + constants_1.TRANSIENT_ERROR_STATUS_CODES.includes(((_a = error.$metadata) === null || _a === void 0 ? void 0 : _a.httpStatusCode) || 0); +}; +exports.isTransientError = isTransientError; +const isServerError = (error) => { + var _a; + if (((_a = error.$metadata) === null || _a === void 0 ? 
void 0 : _a.httpStatusCode) !== undefined) { + const statusCode = error.$metadata.httpStatusCode; + if (500 <= statusCode && statusCode <= 599 && !(0, exports.isTransientError)(error)) { + return true; + } + return false; + } + return false; +}; +exports.isServerError = isServerError; + + +/***/ }), + +/***/ 7237: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getConfigFilepath = exports.ENV_CONFIG_PATH = void 0; +const path_1 = __nccwpck_require__(1017); +const getHomeDir_1 = __nccwpck_require__(8193); +exports.ENV_CONFIG_PATH = "AWS_CONFIG_FILE"; +const getConfigFilepath = () => process.env[exports.ENV_CONFIG_PATH] || (0, path_1.join)((0, getHomeDir_1.getHomeDir)(), ".aws", "config"); +exports.getConfigFilepath = getConfigFilepath; + + +/***/ }), + +/***/ 9036: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getCredentialsFilepath = exports.ENV_CREDENTIALS_PATH = void 0; +const path_1 = __nccwpck_require__(1017); +const getHomeDir_1 = __nccwpck_require__(8193); +exports.ENV_CREDENTIALS_PATH = "AWS_SHARED_CREDENTIALS_FILE"; +const getCredentialsFilepath = () => process.env[exports.ENV_CREDENTIALS_PATH] || (0, path_1.join)((0, getHomeDir_1.getHomeDir)(), ".aws", "credentials"); +exports.getCredentialsFilepath = getCredentialsFilepath; + + +/***/ }), + +/***/ 8193: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getHomeDir = void 0; +const os_1 = __nccwpck_require__(2037); +const path_1 = __nccwpck_require__(1017); +const getHomeDir = () => { + const { HOME, USERPROFILE, HOMEPATH, HOMEDRIVE = `C:${path_1.sep}` } = process.env; + if (HOME) + return HOME; + if (USERPROFILE) + return USERPROFILE; + if (HOMEPATH) + return `${HOMEDRIVE}${HOMEPATH}`; + return (0, os_1.homedir)(); +}; +exports.getHomeDir = getHomeDir; + + +/***/ }), + +/***/ 2041: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getProfileData = void 0; +const profileKeyRegex = /^profile\s(["'])?([^\1]+)\1$/; +const getProfileData = (data) => Object.entries(data) + .filter(([key]) => profileKeyRegex.test(key)) + .reduce((acc, [key, value]) => ({ ...acc, [profileKeyRegex.exec(key)[2]]: value }), { + ...(data.default && { default: data.default }), +}); +exports.getProfileData = getProfileData; + + +/***/ }), + +/***/ 2802: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getProfileName = exports.DEFAULT_PROFILE = exports.ENV_PROFILE = void 0; +exports.ENV_PROFILE = "AWS_PROFILE"; +exports.DEFAULT_PROFILE = "default"; +const getProfileName = (init) => init.profile || process.env[exports.ENV_PROFILE] || exports.DEFAULT_PROFILE; +exports.getProfileName = getProfileName; + + +/***/ }), + +/***/ 4740: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getSSOTokenFilepath = void 0; +const crypto_1 = __nccwpck_require__(6113); +const path_1 = __nccwpck_require__(1017); +const getHomeDir_1 = __nccwpck_require__(8193); +const getSSOTokenFilepath = (id) => { + const hasher = (0, crypto_1.createHash)("sha1"); + 
const cacheName = hasher.update(id).digest("hex"); + return (0, path_1.join)((0, getHomeDir_1.getHomeDir)(), ".aws", "sso", "cache", `${cacheName}.json`); +}; +exports.getSSOTokenFilepath = getSSOTokenFilepath; + + +/***/ }), + +/***/ 9678: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getSSOTokenFromFile = void 0; +const fs_1 = __nccwpck_require__(7147); +const getSSOTokenFilepath_1 = __nccwpck_require__(4740); +const { readFile } = fs_1.promises; +const getSSOTokenFromFile = async (id) => { + const ssoTokenFilepath = (0, getSSOTokenFilepath_1.getSSOTokenFilepath)(id); + const ssoTokenText = await readFile(ssoTokenFilepath, "utf8"); + return JSON.parse(ssoTokenText); +}; +exports.getSSOTokenFromFile = getSSOTokenFromFile; + + +/***/ }), + +/***/ 2820: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getSsoSessionData = void 0; +const ssoSessionKeyRegex = /^sso-session\s(["'])?([^\1]+)\1$/; +const getSsoSessionData = (data) => Object.entries(data) + .filter(([key]) => ssoSessionKeyRegex.test(key)) + .reduce((acc, [key, value]) => ({ ...acc, [ssoSessionKeyRegex.exec(key)[2]]: value }), {}); +exports.getSsoSessionData = getSsoSessionData; + + +/***/ }), + +/***/ 3507: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(8193), exports); +tslib_1.__exportStar(__nccwpck_require__(2802), exports); +tslib_1.__exportStar(__nccwpck_require__(4740), exports); +tslib_1.__exportStar(__nccwpck_require__(9678), exports); +tslib_1.__exportStar(__nccwpck_require__(1879), exports); +tslib_1.__exportStar(__nccwpck_require__(4649), exports); +tslib_1.__exportStar(__nccwpck_require__(2546), exports); +tslib_1.__exportStar(__nccwpck_require__(3191), exports); + + +/***/ }), + +/***/ 1879: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.loadSharedConfigFiles = void 0; +const getConfigFilepath_1 = __nccwpck_require__(7237); +const getCredentialsFilepath_1 = __nccwpck_require__(9036); +const getProfileData_1 = __nccwpck_require__(2041); +const parseIni_1 = __nccwpck_require__(4262); +const slurpFile_1 = __nccwpck_require__(9155); +const swallowError = () => ({}); +const loadSharedConfigFiles = async (init = {}) => { + const { filepath = (0, getCredentialsFilepath_1.getCredentialsFilepath)(), configFilepath = (0, getConfigFilepath_1.getConfigFilepath)() } = init; + const parsedFiles = await Promise.all([ + (0, slurpFile_1.slurpFile)(configFilepath, { + ignoreCache: init.ignoreCache, + }) + .then(parseIni_1.parseIni) + .then(getProfileData_1.getProfileData) + .catch(swallowError), + (0, slurpFile_1.slurpFile)(filepath, { + ignoreCache: init.ignoreCache, + }) + .then(parseIni_1.parseIni) + .catch(swallowError), + ]); + return { + configFile: parsedFiles[0], + credentialsFile: parsedFiles[1], + }; +}; +exports.loadSharedConfigFiles = loadSharedConfigFiles; + + +/***/ }), + +/***/ 4649: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.loadSsoSessionData = void 0; +const getConfigFilepath_1 = 
__nccwpck_require__(7237); +const getSsoSessionData_1 = __nccwpck_require__(2820); +const parseIni_1 = __nccwpck_require__(4262); +const slurpFile_1 = __nccwpck_require__(9155); +const swallowError = () => ({}); +const loadSsoSessionData = async (init = {}) => { + var _a; + return (0, slurpFile_1.slurpFile)((_a = init.configFilepath) !== null && _a !== void 0 ? _a : (0, getConfigFilepath_1.getConfigFilepath)()) + .then(parseIni_1.parseIni) + .then(getSsoSessionData_1.getSsoSessionData) + .catch(swallowError); +}; +exports.loadSsoSessionData = loadSsoSessionData; + + +/***/ }), + +/***/ 9447: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.mergeConfigFiles = void 0; +const mergeConfigFiles = (...files) => { + const merged = {}; + for (const file of files) { + for (const [key, values] of Object.entries(file)) { + if (merged[key] !== undefined) { + Object.assign(merged[key], values); + } + else { + merged[key] = values; + } + } + } + return merged; +}; +exports.mergeConfigFiles = mergeConfigFiles; + + +/***/ }), + +/***/ 4262: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.parseIni = void 0; +const profileNameBlockList = ["__proto__", "profile __proto__"]; +const parseIni = (iniData) => { + const map = {}; + let currentSection; + for (let line of iniData.split(/\r?\n/)) { + line = line.split(/(^|\s)[;#]/)[0].trim(); + const isSection = line[0] === "[" && line[line.length - 1] === "]"; + if (isSection) { + currentSection = line.substring(1, line.length - 1); + if (profileNameBlockList.includes(currentSection)) { + throw new Error(`Found invalid profile name "${currentSection}"`); + } + } + else if (currentSection) { + const indexOfEqualsSign = line.indexOf("="); + const start = 0; + const end = line.length - 1; + const isAssignment = indexOfEqualsSign !== -1 && indexOfEqualsSign !== start && indexOfEqualsSign !== end; + if (isAssignment) { + const [name, value] = [ + line.substring(0, indexOfEqualsSign).trim(), + line.substring(indexOfEqualsSign + 1).trim(), + ]; + map[currentSection] = map[currentSection] || {}; + map[currentSection][name] = value; + } + } + } + return map; +}; +exports.parseIni = parseIni; + + +/***/ }), + +/***/ 2546: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.parseKnownFiles = void 0; +const loadSharedConfigFiles_1 = __nccwpck_require__(1879); +const mergeConfigFiles_1 = __nccwpck_require__(9447); +const parseKnownFiles = async (init) => { + const parsedFiles = await (0, loadSharedConfigFiles_1.loadSharedConfigFiles)(init); + return (0, mergeConfigFiles_1.mergeConfigFiles)(parsedFiles.configFile, parsedFiles.credentialsFile); +}; +exports.parseKnownFiles = parseKnownFiles; + + +/***/ }), + +/***/ 9155: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.slurpFile = void 0; +const fs_1 = __nccwpck_require__(7147); +const { readFile } = fs_1.promises; +const filePromisesHash = {}; +const slurpFile = (path, options) => { + if (!filePromisesHash[path] || (options === null || options === void 0 ? 
void 0 : options.ignoreCache)) { + filePromisesHash[path] = readFile(path, "utf8"); + } + return filePromisesHash[path]; +}; +exports.slurpFile = slurpFile; + + +/***/ }), + +/***/ 3191: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 9733: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.SignatureV4 = void 0; +const eventstream_codec_1 = __nccwpck_require__(6459); +const util_hex_encoding_1 = __nccwpck_require__(5364); +const util_middleware_1 = __nccwpck_require__(2390); +const util_utf8_1 = __nccwpck_require__(1895); +const constants_1 = __nccwpck_require__(8644); +const credentialDerivation_1 = __nccwpck_require__(9623); +const getCanonicalHeaders_1 = __nccwpck_require__(1393); +const getCanonicalQuery_1 = __nccwpck_require__(3243); +const getPayloadHash_1 = __nccwpck_require__(8545); +const headerUtil_1 = __nccwpck_require__(2179); +const moveHeadersToQuery_1 = __nccwpck_require__(9828); +const prepareRequest_1 = __nccwpck_require__(75); +const utilDate_1 = __nccwpck_require__(9299); +class SignatureV4 { + constructor({ applyChecksum, credentials, region, service, sha256, uriEscapePath = true, }) { + this.headerMarshaller = new eventstream_codec_1.HeaderMarshaller(util_utf8_1.toUtf8, util_utf8_1.fromUtf8); + this.service = service; + this.sha256 = sha256; + this.uriEscapePath = uriEscapePath; + this.applyChecksum = typeof applyChecksum === "boolean" ? applyChecksum : true; + this.regionProvider = (0, util_middleware_1.normalizeProvider)(region); + this.credentialProvider = (0, util_middleware_1.normalizeProvider)(credentials); + } + async presign(originalRequest, options = {}) { + const { signingDate = new Date(), expiresIn = 3600, unsignableHeaders, unhoistableHeaders, signableHeaders, signingRegion, signingService, } = options; + const credentials = await this.credentialProvider(); + this.validateResolvedCredentials(credentials); + const region = signingRegion !== null && signingRegion !== void 0 ? signingRegion : (await this.regionProvider()); + const { longDate, shortDate } = formatDate(signingDate); + if (expiresIn > constants_1.MAX_PRESIGNED_TTL) { + return Promise.reject("Signature version 4 presigned URLs" + " must have an expiration date less than one week in" + " the future"); + } + const scope = (0, credentialDerivation_1.createScope)(shortDate, region, signingService !== null && signingService !== void 0 ? 
signingService : this.service); + const request = (0, moveHeadersToQuery_1.moveHeadersToQuery)((0, prepareRequest_1.prepareRequest)(originalRequest), { unhoistableHeaders }); + if (credentials.sessionToken) { + request.query[constants_1.TOKEN_QUERY_PARAM] = credentials.sessionToken; + } + request.query[constants_1.ALGORITHM_QUERY_PARAM] = constants_1.ALGORITHM_IDENTIFIER; + request.query[constants_1.CREDENTIAL_QUERY_PARAM] = `${credentials.accessKeyId}/${scope}`; + request.query[constants_1.AMZ_DATE_QUERY_PARAM] = longDate; + request.query[constants_1.EXPIRES_QUERY_PARAM] = expiresIn.toString(10); + const canonicalHeaders = (0, getCanonicalHeaders_1.getCanonicalHeaders)(request, unsignableHeaders, signableHeaders); + request.query[constants_1.SIGNED_HEADERS_QUERY_PARAM] = getCanonicalHeaderList(canonicalHeaders); + request.query[constants_1.SIGNATURE_QUERY_PARAM] = await this.getSignature(longDate, scope, this.getSigningKey(credentials, region, shortDate, signingService), this.createCanonicalRequest(request, canonicalHeaders, await (0, getPayloadHash_1.getPayloadHash)(originalRequest, this.sha256))); + return request; + } + async sign(toSign, options) { + if (typeof toSign === "string") { + return this.signString(toSign, options); + } + else if (toSign.headers && toSign.payload) { + return this.signEvent(toSign, options); + } + else if (toSign.message) { + return this.signMessage(toSign, options); + } + else { + return this.signRequest(toSign, options); + } + } + async signEvent({ headers, payload }, { signingDate = new Date(), priorSignature, signingRegion, signingService }) { + const region = signingRegion !== null && signingRegion !== void 0 ? signingRegion : (await this.regionProvider()); + const { shortDate, longDate } = formatDate(signingDate); + const scope = (0, credentialDerivation_1.createScope)(shortDate, region, signingService !== null && signingService !== void 0 ? signingService : this.service); + const hashedPayload = await (0, getPayloadHash_1.getPayloadHash)({ headers: {}, body: payload }, this.sha256); + const hash = new this.sha256(); + hash.update(headers); + const hashedHeaders = (0, util_hex_encoding_1.toHex)(await hash.digest()); + const stringToSign = [ + constants_1.EVENT_ALGORITHM_IDENTIFIER, + longDate, + scope, + priorSignature, + hashedHeaders, + hashedPayload, + ].join("\n"); + return this.signString(stringToSign, { signingDate, signingRegion: region, signingService }); + } + async signMessage(signableMessage, { signingDate = new Date(), signingRegion, signingService }) { + const promise = this.signEvent({ + headers: this.headerMarshaller.format(signableMessage.message.headers), + payload: signableMessage.message.body, + }, { + signingDate, + signingRegion, + signingService, + priorSignature: signableMessage.priorSignature, + }); + return promise.then((signature) => { + return { message: signableMessage.message, signature }; + }); + } + async signString(stringToSign, { signingDate = new Date(), signingRegion, signingService } = {}) { + const credentials = await this.credentialProvider(); + this.validateResolvedCredentials(credentials); + const region = signingRegion !== null && signingRegion !== void 0 ? 
signingRegion : (await this.regionProvider()); + const { shortDate } = formatDate(signingDate); + const hash = new this.sha256(await this.getSigningKey(credentials, region, shortDate, signingService)); + hash.update((0, util_utf8_1.toUint8Array)(stringToSign)); + return (0, util_hex_encoding_1.toHex)(await hash.digest()); + } + async signRequest(requestToSign, { signingDate = new Date(), signableHeaders, unsignableHeaders, signingRegion, signingService, } = {}) { + const credentials = await this.credentialProvider(); + this.validateResolvedCredentials(credentials); + const region = signingRegion !== null && signingRegion !== void 0 ? signingRegion : (await this.regionProvider()); + const request = (0, prepareRequest_1.prepareRequest)(requestToSign); + const { longDate, shortDate } = formatDate(signingDate); + const scope = (0, credentialDerivation_1.createScope)(shortDate, region, signingService !== null && signingService !== void 0 ? signingService : this.service); + request.headers[constants_1.AMZ_DATE_HEADER] = longDate; + if (credentials.sessionToken) { + request.headers[constants_1.TOKEN_HEADER] = credentials.sessionToken; + } + const payloadHash = await (0, getPayloadHash_1.getPayloadHash)(request, this.sha256); + if (!(0, headerUtil_1.hasHeader)(constants_1.SHA256_HEADER, request.headers) && this.applyChecksum) { + request.headers[constants_1.SHA256_HEADER] = payloadHash; + } + const canonicalHeaders = (0, getCanonicalHeaders_1.getCanonicalHeaders)(request, unsignableHeaders, signableHeaders); + const signature = await this.getSignature(longDate, scope, this.getSigningKey(credentials, region, shortDate, signingService), this.createCanonicalRequest(request, canonicalHeaders, payloadHash)); + request.headers[constants_1.AUTH_HEADER] = + `${constants_1.ALGORITHM_IDENTIFIER} ` + + `Credential=${credentials.accessKeyId}/${scope}, ` + + `SignedHeaders=${getCanonicalHeaderList(canonicalHeaders)}, ` + + `Signature=${signature}`; + return request; + } + createCanonicalRequest(request, canonicalHeaders, payloadHash) { + const sortedHeaders = Object.keys(canonicalHeaders).sort(); + return `${request.method} +${this.getCanonicalPath(request)} +${(0, getCanonicalQuery_1.getCanonicalQuery)(request)} +${sortedHeaders.map((name) => `${name}:${canonicalHeaders[name]}`).join("\n")} + +${sortedHeaders.join(";")} +${payloadHash}`; + } + async createStringToSign(longDate, credentialScope, canonicalRequest) { + const hash = new this.sha256(); + hash.update((0, util_utf8_1.toUint8Array)(canonicalRequest)); + const hashedRequest = await hash.digest(); + return `${constants_1.ALGORITHM_IDENTIFIER} +${longDate} +${credentialScope} +${(0, util_hex_encoding_1.toHex)(hashedRequest)}`; + } + getCanonicalPath({ path }) { + if (this.uriEscapePath) { + const normalizedPathSegments = []; + for (const pathSegment of path.split("/")) { + if ((pathSegment === null || pathSegment === void 0 ? void 0 : pathSegment.length) === 0) + continue; + if (pathSegment === ".") + continue; + if (pathSegment === "..") { + normalizedPathSegments.pop(); + } + else { + normalizedPathSegments.push(pathSegment); + } + } + const normalizedPath = `${(path === null || path === void 0 ? void 0 : path.startsWith("/")) ? "/" : ""}${normalizedPathSegments.join("/")}${normalizedPathSegments.length > 0 && (path === null || path === void 0 ? void 0 : path.endsWith("/")) ? 
"/" : ""}`; + const doubleEncoded = encodeURIComponent(normalizedPath); + return doubleEncoded.replace(/%2F/g, "/"); + } + return path; + } + async getSignature(longDate, credentialScope, keyPromise, canonicalRequest) { + const stringToSign = await this.createStringToSign(longDate, credentialScope, canonicalRequest); + const hash = new this.sha256(await keyPromise); + hash.update((0, util_utf8_1.toUint8Array)(stringToSign)); + return (0, util_hex_encoding_1.toHex)(await hash.digest()); + } + getSigningKey(credentials, region, shortDate, service) { + return (0, credentialDerivation_1.getSigningKey)(this.sha256, credentials, shortDate, region, service || this.service); + } + validateResolvedCredentials(credentials) { + if (typeof credentials !== "object" || + typeof credentials.accessKeyId !== "string" || + typeof credentials.secretAccessKey !== "string") { + throw new Error("Resolved credential object is not valid"); + } + } +} +exports.SignatureV4 = SignatureV4; +const formatDate = (now) => { + const longDate = (0, utilDate_1.iso8601)(now).replace(/[\-:]/g, ""); + return { + longDate, + shortDate: longDate.slice(0, 8), + }; +}; +const getCanonicalHeaderList = (headers) => Object.keys(headers).sort().join(";"); + + +/***/ }), + +/***/ 9098: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.cloneQuery = exports.cloneRequest = void 0; +const cloneRequest = ({ headers, query, ...rest }) => ({ + ...rest, + headers: { ...headers }, + query: query ? (0, exports.cloneQuery)(query) : undefined, +}); +exports.cloneRequest = cloneRequest; +const cloneQuery = (query) => Object.keys(query).reduce((carry, paramName) => { + const param = query[paramName]; + return { + ...carry, + [paramName]: Array.isArray(param) ? 
[...param] : param, + }; +}, {}); +exports.cloneQuery = cloneQuery; + + +/***/ }), + +/***/ 8644: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.MAX_PRESIGNED_TTL = exports.KEY_TYPE_IDENTIFIER = exports.MAX_CACHE_SIZE = exports.UNSIGNED_PAYLOAD = exports.EVENT_ALGORITHM_IDENTIFIER = exports.ALGORITHM_IDENTIFIER_V4A = exports.ALGORITHM_IDENTIFIER = exports.UNSIGNABLE_PATTERNS = exports.SEC_HEADER_PATTERN = exports.PROXY_HEADER_PATTERN = exports.ALWAYS_UNSIGNABLE_HEADERS = exports.HOST_HEADER = exports.TOKEN_HEADER = exports.SHA256_HEADER = exports.SIGNATURE_HEADER = exports.GENERATED_HEADERS = exports.DATE_HEADER = exports.AMZ_DATE_HEADER = exports.AUTH_HEADER = exports.REGION_SET_PARAM = exports.TOKEN_QUERY_PARAM = exports.SIGNATURE_QUERY_PARAM = exports.EXPIRES_QUERY_PARAM = exports.SIGNED_HEADERS_QUERY_PARAM = exports.AMZ_DATE_QUERY_PARAM = exports.CREDENTIAL_QUERY_PARAM = exports.ALGORITHM_QUERY_PARAM = void 0; +exports.ALGORITHM_QUERY_PARAM = "X-Amz-Algorithm"; +exports.CREDENTIAL_QUERY_PARAM = "X-Amz-Credential"; +exports.AMZ_DATE_QUERY_PARAM = "X-Amz-Date"; +exports.SIGNED_HEADERS_QUERY_PARAM = "X-Amz-SignedHeaders"; +exports.EXPIRES_QUERY_PARAM = "X-Amz-Expires"; +exports.SIGNATURE_QUERY_PARAM = "X-Amz-Signature"; +exports.TOKEN_QUERY_PARAM = "X-Amz-Security-Token"; +exports.REGION_SET_PARAM = "X-Amz-Region-Set"; +exports.AUTH_HEADER = "authorization"; +exports.AMZ_DATE_HEADER = exports.AMZ_DATE_QUERY_PARAM.toLowerCase(); +exports.DATE_HEADER = "date"; +exports.GENERATED_HEADERS = [exports.AUTH_HEADER, exports.AMZ_DATE_HEADER, exports.DATE_HEADER]; +exports.SIGNATURE_HEADER = exports.SIGNATURE_QUERY_PARAM.toLowerCase(); +exports.SHA256_HEADER = "x-amz-content-sha256"; +exports.TOKEN_HEADER = exports.TOKEN_QUERY_PARAM.toLowerCase(); +exports.HOST_HEADER = "host"; +exports.ALWAYS_UNSIGNABLE_HEADERS = { + authorization: true, + "cache-control": true, + connection: true, + expect: true, + from: true, + "keep-alive": true, + "max-forwards": true, + pragma: true, + referer: true, + te: true, + trailer: true, + "transfer-encoding": true, + upgrade: true, + "user-agent": true, + "x-amzn-trace-id": true, +}; +exports.PROXY_HEADER_PATTERN = /^proxy-/; +exports.SEC_HEADER_PATTERN = /^sec-/; +exports.UNSIGNABLE_PATTERNS = [/^proxy-/i, /^sec-/i]; +exports.ALGORITHM_IDENTIFIER = "AWS4-HMAC-SHA256"; +exports.ALGORITHM_IDENTIFIER_V4A = "AWS4-ECDSA-P256-SHA256"; +exports.EVENT_ALGORITHM_IDENTIFIER = "AWS4-HMAC-SHA256-PAYLOAD"; +exports.UNSIGNED_PAYLOAD = "UNSIGNED-PAYLOAD"; +exports.MAX_CACHE_SIZE = 50; +exports.KEY_TYPE_IDENTIFIER = "aws4_request"; +exports.MAX_PRESIGNED_TTL = 60 * 60 * 24 * 7; + + +/***/ }), + +/***/ 9623: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.clearCredentialCache = exports.getSigningKey = exports.createScope = void 0; +const util_hex_encoding_1 = __nccwpck_require__(5364); +const util_utf8_1 = __nccwpck_require__(1895); +const constants_1 = __nccwpck_require__(8644); +const signingKeyCache = {}; +const cacheQueue = []; +const createScope = (shortDate, region, service) => `${shortDate}/${region}/${service}/${constants_1.KEY_TYPE_IDENTIFIER}`; +exports.createScope = createScope; +const getSigningKey = async (sha256Constructor, credentials, shortDate, region, service) => { + const credsHash = await hmac(sha256Constructor, credentials.secretAccessKey, 
credentials.accessKeyId); + const cacheKey = `${shortDate}:${region}:${service}:${(0, util_hex_encoding_1.toHex)(credsHash)}:${credentials.sessionToken}`; + if (cacheKey in signingKeyCache) { + return signingKeyCache[cacheKey]; + } + cacheQueue.push(cacheKey); + while (cacheQueue.length > constants_1.MAX_CACHE_SIZE) { + delete signingKeyCache[cacheQueue.shift()]; + } + let key = `AWS4${credentials.secretAccessKey}`; + for (const signable of [shortDate, region, service, constants_1.KEY_TYPE_IDENTIFIER]) { + key = await hmac(sha256Constructor, key, signable); + } + return (signingKeyCache[cacheKey] = key); +}; +exports.getSigningKey = getSigningKey; +const clearCredentialCache = () => { + cacheQueue.length = 0; + Object.keys(signingKeyCache).forEach((cacheKey) => { + delete signingKeyCache[cacheKey]; + }); +}; +exports.clearCredentialCache = clearCredentialCache; +const hmac = (ctor, secret, data) => { + const hash = new ctor(secret); + hash.update((0, util_utf8_1.toUint8Array)(data)); + return hash.digest(); +}; + + +/***/ }), + +/***/ 1393: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getCanonicalHeaders = void 0; +const constants_1 = __nccwpck_require__(8644); +const getCanonicalHeaders = ({ headers }, unsignableHeaders, signableHeaders) => { + const canonical = {}; + for (const headerName of Object.keys(headers).sort()) { + if (headers[headerName] == undefined) { + continue; + } + const canonicalHeaderName = headerName.toLowerCase(); + if (canonicalHeaderName in constants_1.ALWAYS_UNSIGNABLE_HEADERS || + (unsignableHeaders === null || unsignableHeaders === void 0 ? void 0 : unsignableHeaders.has(canonicalHeaderName)) || + constants_1.PROXY_HEADER_PATTERN.test(canonicalHeaderName) || + constants_1.SEC_HEADER_PATTERN.test(canonicalHeaderName)) { + if (!signableHeaders || (signableHeaders && !signableHeaders.has(canonicalHeaderName))) { + continue; + } + } + canonical[canonicalHeaderName] = headers[headerName].trim().replace(/\s+/g, " "); + } + return canonical; +}; +exports.getCanonicalHeaders = getCanonicalHeaders; + + +/***/ }), + +/***/ 3243: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getCanonicalQuery = void 0; +const util_uri_escape_1 = __nccwpck_require__(4197); +const constants_1 = __nccwpck_require__(8644); +const getCanonicalQuery = ({ query = {} }) => { + const keys = []; + const serialized = {}; + for (const key of Object.keys(query).sort()) { + if (key.toLowerCase() === constants_1.SIGNATURE_HEADER) { + continue; + } + keys.push(key); + const value = query[key]; + if (typeof value === "string") { + serialized[key] = `${(0, util_uri_escape_1.escapeUri)(key)}=${(0, util_uri_escape_1.escapeUri)(value)}`; + } + else if (Array.isArray(value)) { + serialized[key] = value + .slice(0) + .reduce((encoded, value) => encoded.concat([`${(0, util_uri_escape_1.escapeUri)(key)}=${(0, util_uri_escape_1.escapeUri)(value)}`]), []) + .sort() + .join("&"); + } + } + return keys + .map((key) => serialized[key]) + .filter((serialized) => serialized) + .join("&"); +}; +exports.getCanonicalQuery = getCanonicalQuery; + + +/***/ }), + +/***/ 8545: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getPayloadHash = void 0; +const is_array_buffer_1 = 
__nccwpck_require__(780); +const util_hex_encoding_1 = __nccwpck_require__(5364); +const util_utf8_1 = __nccwpck_require__(1895); +const constants_1 = __nccwpck_require__(8644); +const getPayloadHash = async ({ headers, body }, hashConstructor) => { + for (const headerName of Object.keys(headers)) { + if (headerName.toLowerCase() === constants_1.SHA256_HEADER) { + return headers[headerName]; + } + } + if (body == undefined) { + return "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"; + } + else if (typeof body === "string" || ArrayBuffer.isView(body) || (0, is_array_buffer_1.isArrayBuffer)(body)) { + const hashCtor = new hashConstructor(); + hashCtor.update((0, util_utf8_1.toUint8Array)(body)); + return (0, util_hex_encoding_1.toHex)(await hashCtor.digest()); + } + return constants_1.UNSIGNED_PAYLOAD; +}; +exports.getPayloadHash = getPayloadHash; + + +/***/ }), + +/***/ 2179: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.deleteHeader = exports.getHeaderValue = exports.hasHeader = void 0; +const hasHeader = (soughtHeader, headers) => { + soughtHeader = soughtHeader.toLowerCase(); + for (const headerName of Object.keys(headers)) { + if (soughtHeader === headerName.toLowerCase()) { + return true; + } + } + return false; +}; +exports.hasHeader = hasHeader; +const getHeaderValue = (soughtHeader, headers) => { + soughtHeader = soughtHeader.toLowerCase(); + for (const headerName of Object.keys(headers)) { + if (soughtHeader === headerName.toLowerCase()) { + return headers[headerName]; + } + } + return undefined; +}; +exports.getHeaderValue = getHeaderValue; +const deleteHeader = (soughtHeader, headers) => { + soughtHeader = soughtHeader.toLowerCase(); + for (const headerName of Object.keys(headers)) { + if (soughtHeader === headerName.toLowerCase()) { + delete headers[headerName]; + } + } +}; +exports.deleteHeader = deleteHeader; + + +/***/ }), + +/***/ 1528: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.prepareRequest = exports.moveHeadersToQuery = exports.getPayloadHash = exports.getCanonicalQuery = exports.getCanonicalHeaders = void 0; +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(9733), exports); +var getCanonicalHeaders_1 = __nccwpck_require__(1393); +Object.defineProperty(exports, "getCanonicalHeaders", ({ enumerable: true, get: function () { return getCanonicalHeaders_1.getCanonicalHeaders; } })); +var getCanonicalQuery_1 = __nccwpck_require__(3243); +Object.defineProperty(exports, "getCanonicalQuery", ({ enumerable: true, get: function () { return getCanonicalQuery_1.getCanonicalQuery; } })); +var getPayloadHash_1 = __nccwpck_require__(8545); +Object.defineProperty(exports, "getPayloadHash", ({ enumerable: true, get: function () { return getPayloadHash_1.getPayloadHash; } })); +var moveHeadersToQuery_1 = __nccwpck_require__(9828); +Object.defineProperty(exports, "moveHeadersToQuery", ({ enumerable: true, get: function () { return moveHeadersToQuery_1.moveHeadersToQuery; } })); +var prepareRequest_1 = __nccwpck_require__(75); +Object.defineProperty(exports, "prepareRequest", ({ enumerable: true, get: function () { return prepareRequest_1.prepareRequest; } })); +tslib_1.__exportStar(__nccwpck_require__(9623), exports); + + +/***/ }), + +/***/ 9828: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use 
strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.moveHeadersToQuery = void 0; +const cloneRequest_1 = __nccwpck_require__(9098); +const moveHeadersToQuery = (request, options = {}) => { + var _a; + const { headers, query = {} } = typeof request.clone === "function" ? request.clone() : (0, cloneRequest_1.cloneRequest)(request); + for (const name of Object.keys(headers)) { + const lname = name.toLowerCase(); + if (lname.slice(0, 6) === "x-amz-" && !((_a = options.unhoistableHeaders) === null || _a === void 0 ? void 0 : _a.has(lname))) { + query[name] = headers[name]; + delete headers[name]; + } + } + return { + ...request, + headers, + query, + }; +}; +exports.moveHeadersToQuery = moveHeadersToQuery; + + +/***/ }), + +/***/ 75: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.prepareRequest = void 0; +const cloneRequest_1 = __nccwpck_require__(9098); +const constants_1 = __nccwpck_require__(8644); +const prepareRequest = (request) => { + request = typeof request.clone === "function" ? request.clone() : (0, cloneRequest_1.cloneRequest)(request); + for (const headerName of Object.keys(request.headers)) { + if (constants_1.GENERATED_HEADERS.indexOf(headerName.toLowerCase()) > -1) { + delete request.headers[headerName]; + } + } + return request; +}; +exports.prepareRequest = prepareRequest; + + +/***/ }), + +/***/ 9299: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.toDate = exports.iso8601 = void 0; +const iso8601 = (time) => (0, exports.toDate)(time) + .toISOString() + .replace(/\.\d{3}Z$/, "Z"); +exports.iso8601 = iso8601; +const toDate = (time) => { + if (typeof time === "number") { + return new Date(time * 1000); + } + if (typeof time === "string") { + if (Number(time)) { + return new Date(Number(time) * 1000); + } + return new Date(time); + } + return time; +}; +exports.toDate = toDate; + + +/***/ }), + +/***/ 438: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.NoOpLogger = void 0; +class NoOpLogger { + trace() { } + debug() { } + info() { } + warn() { } + error() { } +} +exports.NoOpLogger = NoOpLogger; + + +/***/ }), + +/***/ 1600: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Client = void 0; +const middleware_stack_1 = __nccwpck_require__(7911); +class Client { + constructor(config) { + this.middlewareStack = (0, middleware_stack_1.constructStack)(); + this.config = config; + } + send(command, optionsOrCb, cb) { + const options = typeof optionsOrCb !== "function" ? optionsOrCb : undefined; + const callback = typeof optionsOrCb === "function" ? 
optionsOrCb : cb; + const handler = command.resolveMiddleware(this.middlewareStack, this.config, options); + if (callback) { + handler(command) + .then((result) => callback(null, result.output), (err) => callback(err)) + .catch(() => { }); + } + else { + return handler(command).then((result) => result.output); + } + } + destroy() { + if (this.config.requestHandler.destroy) + this.config.requestHandler.destroy(); + } +} +exports.Client = Client; + + +/***/ }), + +/***/ 2813: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.collectBody = void 0; +const util_stream_1 = __nccwpck_require__(6607); +const collectBody = async (streamBody = new Uint8Array(), context) => { + if (streamBody instanceof Uint8Array) { + return util_stream_1.Uint8ArrayBlobAdapter.mutate(streamBody); + } + if (!streamBody) { + return util_stream_1.Uint8ArrayBlobAdapter.mutate(new Uint8Array()); + } + const fromContext = context.streamCollector(streamBody); + return util_stream_1.Uint8ArrayBlobAdapter.mutate(await fromContext); +}; +exports.collectBody = collectBody; + + +/***/ }), + +/***/ 5414: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Command = void 0; +const middleware_stack_1 = __nccwpck_require__(7911); +class Command { + constructor() { + this.middlewareStack = (0, middleware_stack_1.constructStack)(); + } +} +exports.Command = Command; + + +/***/ }), + +/***/ 2541: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.SENSITIVE_STRING = void 0; +exports.SENSITIVE_STRING = "***SensitiveInformation***"; + + +/***/ }), + +/***/ 6929: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createAggregatedClient = void 0; +const createAggregatedClient = (commands, Client) => { + for (const command of Object.keys(commands)) { + const CommandCtor = commands[command]; + const methodImpl = async function (args, optionsOrCb, cb) { + const command = new CommandCtor(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } + else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") + throw new Error(`Expected http options but got ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } + else { + return this.send(command, optionsOrCb); + } + }; + const methodName = (command[0].toLowerCase() + command.slice(1)).replace(/Command$/, ""); + Client.prototype[methodName] = methodImpl; + } +}; +exports.createAggregatedClient = createAggregatedClient; + + +/***/ }), + +/***/ 1737: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.parseEpochTimestamp = exports.parseRfc7231DateTime = exports.parseRfc3339DateTimeWithOffset = exports.parseRfc3339DateTime = exports.dateToUtcString = void 0; +const parse_utils_1 = __nccwpck_require__(4857); +const DAYS = ["Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"]; +const MONTHS = ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"]; +function dateToUtcString(date) { + const year = date.getUTCFullYear(); + const month = date.getUTCMonth(); + const dayOfWeek = date.getUTCDay(); + const 
dayOfMonthInt = date.getUTCDate(); + const hoursInt = date.getUTCHours(); + const minutesInt = date.getUTCMinutes(); + const secondsInt = date.getUTCSeconds(); + const dayOfMonthString = dayOfMonthInt < 10 ? `0${dayOfMonthInt}` : `${dayOfMonthInt}`; + const hoursString = hoursInt < 10 ? `0${hoursInt}` : `${hoursInt}`; + const minutesString = minutesInt < 10 ? `0${minutesInt}` : `${minutesInt}`; + const secondsString = secondsInt < 10 ? `0${secondsInt}` : `${secondsInt}`; + return `${DAYS[dayOfWeek]}, ${dayOfMonthString} ${MONTHS[month]} ${year} ${hoursString}:${minutesString}:${secondsString} GMT`; +} +exports.dateToUtcString = dateToUtcString; +const RFC3339 = new RegExp(/^(\d{4})-(\d{2})-(\d{2})[tT](\d{2}):(\d{2}):(\d{2})(?:\.(\d+))?[zZ]$/); +const parseRfc3339DateTime = (value) => { + if (value === null || value === undefined) { + return undefined; + } + if (typeof value !== "string") { + throw new TypeError("RFC-3339 date-times must be expressed as strings"); + } + const match = RFC3339.exec(value); + if (!match) { + throw new TypeError("Invalid RFC-3339 date-time value"); + } + const [_, yearStr, monthStr, dayStr, hours, minutes, seconds, fractionalMilliseconds] = match; + const year = (0, parse_utils_1.strictParseShort)(stripLeadingZeroes(yearStr)); + const month = parseDateValue(monthStr, "month", 1, 12); + const day = parseDateValue(dayStr, "day", 1, 31); + return buildDate(year, month, day, { hours, minutes, seconds, fractionalMilliseconds }); +}; +exports.parseRfc3339DateTime = parseRfc3339DateTime; +const RFC3339_WITH_OFFSET = new RegExp(/^(\d{4})-(\d{2})-(\d{2})[tT](\d{2}):(\d{2}):(\d{2})(?:\.(\d+))?(([-+]\d{2}\:\d{2})|[zZ])$/); +const parseRfc3339DateTimeWithOffset = (value) => { + if (value === null || value === undefined) { + return undefined; + } + if (typeof value !== "string") { + throw new TypeError("RFC-3339 date-times must be expressed as strings"); + } + const match = RFC3339_WITH_OFFSET.exec(value); + if (!match) { + throw new TypeError("Invalid RFC-3339 date-time value"); + } + const [_, yearStr, monthStr, dayStr, hours, minutes, seconds, fractionalMilliseconds, offsetStr] = match; + const year = (0, parse_utils_1.strictParseShort)(stripLeadingZeroes(yearStr)); + const month = parseDateValue(monthStr, "month", 1, 12); + const day = parseDateValue(dayStr, "day", 1, 31); + const date = buildDate(year, month, day, { hours, minutes, seconds, fractionalMilliseconds }); + if (offsetStr.toUpperCase() != "Z") { + date.setTime(date.getTime() - parseOffsetToMilliseconds(offsetStr)); + } + return date; +}; +exports.parseRfc3339DateTimeWithOffset = parseRfc3339DateTimeWithOffset; +const IMF_FIXDATE = new RegExp(/^(?:Mon|Tue|Wed|Thu|Fri|Sat|Sun), (\d{2}) (Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec) (\d{4}) (\d{1,2}):(\d{2}):(\d{2})(?:\.(\d+))? GMT$/); +const RFC_850_DATE = new RegExp(/^(?:Monday|Tuesday|Wednesday|Thursday|Friday|Saturday|Sunday), (\d{2})-(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)-(\d{2}) (\d{1,2}):(\d{2}):(\d{2})(?:\.(\d+))? GMT$/); +const ASC_TIME = new RegExp(/^(?:Mon|Tue|Wed|Thu|Fri|Sat|Sun) (Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec) ( [1-9]|\d{2}) (\d{1,2}):(\d{2}):(\d{2})(?:\.(\d+))? 
(\d{4})$/); +const parseRfc7231DateTime = (value) => { + if (value === null || value === undefined) { + return undefined; + } + if (typeof value !== "string") { + throw new TypeError("RFC-7231 date-times must be expressed as strings"); + } + let match = IMF_FIXDATE.exec(value); + if (match) { + const [_, dayStr, monthStr, yearStr, hours, minutes, seconds, fractionalMilliseconds] = match; + return buildDate((0, parse_utils_1.strictParseShort)(stripLeadingZeroes(yearStr)), parseMonthByShortName(monthStr), parseDateValue(dayStr, "day", 1, 31), { hours, minutes, seconds, fractionalMilliseconds }); + } + match = RFC_850_DATE.exec(value); + if (match) { + const [_, dayStr, monthStr, yearStr, hours, minutes, seconds, fractionalMilliseconds] = match; + return adjustRfc850Year(buildDate(parseTwoDigitYear(yearStr), parseMonthByShortName(monthStr), parseDateValue(dayStr, "day", 1, 31), { + hours, + minutes, + seconds, + fractionalMilliseconds, + })); + } + match = ASC_TIME.exec(value); + if (match) { + const [_, monthStr, dayStr, hours, minutes, seconds, fractionalMilliseconds, yearStr] = match; + return buildDate((0, parse_utils_1.strictParseShort)(stripLeadingZeroes(yearStr)), parseMonthByShortName(monthStr), parseDateValue(dayStr.trimLeft(), "day", 1, 31), { hours, minutes, seconds, fractionalMilliseconds }); + } + throw new TypeError("Invalid RFC-7231 date-time value"); +}; +exports.parseRfc7231DateTime = parseRfc7231DateTime; +const parseEpochTimestamp = (value) => { + if (value === null || value === undefined) { + return undefined; + } + let valueAsDouble; + if (typeof value === "number") { + valueAsDouble = value; + } + else if (typeof value === "string") { + valueAsDouble = (0, parse_utils_1.strictParseDouble)(value); + } + else { + throw new TypeError("Epoch timestamps must be expressed as floating point numbers or their string representation"); + } + if (Number.isNaN(valueAsDouble) || valueAsDouble === Infinity || valueAsDouble === -Infinity) { + throw new TypeError("Epoch timestamps must be valid, non-Infinite, non-NaN numerics"); + } + return new Date(Math.round(valueAsDouble * 1000)); +}; +exports.parseEpochTimestamp = parseEpochTimestamp; +const buildDate = (year, month, day, time) => { + const adjustedMonth = month - 1; + validateDayOfMonth(year, adjustedMonth, day); + return new Date(Date.UTC(year, adjustedMonth, day, parseDateValue(time.hours, "hour", 0, 23), parseDateValue(time.minutes, "minute", 0, 59), parseDateValue(time.seconds, "seconds", 0, 60), parseMilliseconds(time.fractionalMilliseconds))); +}; +const parseTwoDigitYear = (value) => { + const thisYear = new Date().getUTCFullYear(); + const valueInThisCentury = Math.floor(thisYear / 100) * 100 + (0, parse_utils_1.strictParseShort)(stripLeadingZeroes(value)); + if (valueInThisCentury < thisYear) { + return valueInThisCentury + 100; + } + return valueInThisCentury; +}; +const FIFTY_YEARS_IN_MILLIS = 50 * 365 * 24 * 60 * 60 * 1000; +const adjustRfc850Year = (input) => { + if (input.getTime() - new Date().getTime() > FIFTY_YEARS_IN_MILLIS) { + return new Date(Date.UTC(input.getUTCFullYear() - 100, input.getUTCMonth(), input.getUTCDate(), input.getUTCHours(), input.getUTCMinutes(), input.getUTCSeconds(), input.getUTCMilliseconds())); + } + return input; +}; +const parseMonthByShortName = (value) => { + const monthIdx = MONTHS.indexOf(value); + if (monthIdx < 0) { + throw new TypeError(`Invalid month: ${value}`); + } + return monthIdx + 1; +}; +const DAYS_IN_MONTH = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]; +const 
validateDayOfMonth = (year, month, day) => { + let maxDays = DAYS_IN_MONTH[month]; + if (month === 1 && isLeapYear(year)) { + maxDays = 29; + } + if (day > maxDays) { + throw new TypeError(`Invalid day for ${MONTHS[month]} in ${year}: ${day}`); + } +}; +const isLeapYear = (year) => { + return year % 4 === 0 && (year % 100 !== 0 || year % 400 === 0); +}; +const parseDateValue = (value, type, lower, upper) => { + const dateVal = (0, parse_utils_1.strictParseByte)(stripLeadingZeroes(value)); + if (dateVal < lower || dateVal > upper) { + throw new TypeError(`${type} must be between ${lower} and ${upper}, inclusive`); + } + return dateVal; +}; +const parseMilliseconds = (value) => { + if (value === null || value === undefined) { + return 0; + } + return (0, parse_utils_1.strictParseFloat32)("0." + value) * 1000; +}; +const parseOffsetToMilliseconds = (value) => { + const directionStr = value[0]; + let direction = 1; + if (directionStr == "+") { + direction = 1; + } + else if (directionStr == "-") { + direction = -1; + } + else { + throw new TypeError(`Offset direction, ${directionStr}, must be "+" or "-"`); + } + const hour = Number(value.substring(1, 3)); + const minute = Number(value.substring(4, 6)); + return direction * (hour * 60 + minute) * 60 * 1000; +}; +const stripLeadingZeroes = (value) => { + let idx = 0; + while (idx < value.length - 1 && value.charAt(idx) === "0") { + idx++; + } + if (idx === 0) { + return value; + } + return value.slice(idx); +}; + + +/***/ }), + +/***/ 9681: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.withBaseException = exports.throwDefaultError = void 0; +const exceptions_1 = __nccwpck_require__(8074); +const throwDefaultError = ({ output, parsedBody, exceptionCtor, errorCode }) => { + const $metadata = deserializeMetadata(output); + const statusCode = $metadata.httpStatusCode ? $metadata.httpStatusCode + "" : undefined; + const response = new exceptionCtor({ + name: (parsedBody === null || parsedBody === void 0 ? void 0 : parsedBody.code) || (parsedBody === null || parsedBody === void 0 ? void 0 : parsedBody.Code) || errorCode || statusCode || "UnknownError", + $fault: "client", + $metadata, + }); + throw (0, exceptions_1.decorateServiceException)(response, parsedBody); +}; +exports.throwDefaultError = throwDefaultError; +const withBaseException = (ExceptionCtor) => { + return ({ output, parsedBody, errorCode }) => { + (0, exports.throwDefaultError)({ output, parsedBody, exceptionCtor: ExceptionCtor, errorCode }); + }; +}; +exports.withBaseException = withBaseException; +const deserializeMetadata = (output) => { + var _a, _b; + return ({ + httpStatusCode: output.statusCode, + requestId: (_b = (_a = output.headers["x-amzn-requestid"]) !== null && _a !== void 0 ? _a : output.headers["x-amzn-request-id"]) !== null && _b !== void 0 ? 
_b : output.headers["x-amz-request-id"], + extendedRequestId: output.headers["x-amz-id-2"], + cfId: output.headers["x-amz-cf-id"], + }); +}; + + +/***/ }), + +/***/ 1163: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.loadConfigsForDefaultMode = void 0; +const loadConfigsForDefaultMode = (mode) => { + switch (mode) { + case "standard": + return { + retryMode: "standard", + connectionTimeout: 3100, + }; + case "in-region": + return { + retryMode: "standard", + connectionTimeout: 1100, + }; + case "cross-region": + return { + retryMode: "standard", + connectionTimeout: 3100, + }; + case "mobile": + return { + retryMode: "standard", + connectionTimeout: 30000, + }; + default: + return {}; + } +}; +exports.loadConfigsForDefaultMode = loadConfigsForDefaultMode; + + +/***/ }), + +/***/ 2015: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.emitWarningIfUnsupportedVersion = void 0; +let warningEmitted = false; +const emitWarningIfUnsupportedVersion = (version) => { + if (version && !warningEmitted && parseInt(version.substring(1, version.indexOf("."))) < 14) { + warningEmitted = true; + } +}; +exports.emitWarningIfUnsupportedVersion = emitWarningIfUnsupportedVersion; + + +/***/ }), + +/***/ 8074: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.decorateServiceException = exports.ServiceException = void 0; +class ServiceException extends Error { + constructor(options) { + super(options.message); + Object.setPrototypeOf(this, ServiceException.prototype); + this.name = options.name; + this.$fault = options.$fault; + this.$metadata = options.$metadata; + } +} +exports.ServiceException = ServiceException; +const decorateServiceException = (exception, additions = {}) => { + Object.entries(additions) + .filter(([, v]) => v !== undefined) + .forEach(([k, v]) => { + if (exception[k] == undefined || exception[k] === "") { + exception[k] = v; + } + }); + const message = exception.message || exception.Message || "UnknownError"; + exception.message = message; + delete exception.Message; + return exception; +}; +exports.decorateServiceException = decorateServiceException; + + +/***/ }), + +/***/ 6016: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.extendedEncodeURIComponent = void 0; +function extendedEncodeURIComponent(str) { + return encodeURIComponent(str).replace(/[!'()*]/g, function (c) { + return "%" + c.charCodeAt(0).toString(16).toUpperCase(); + }); +} +exports.extendedEncodeURIComponent = extendedEncodeURIComponent; + + +/***/ }), + +/***/ 2638: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getArrayIfSingleItem = void 0; +const getArrayIfSingleItem = (mayBeArray) => Array.isArray(mayBeArray) ? 
mayBeArray : [mayBeArray]; +exports.getArrayIfSingleItem = getArrayIfSingleItem; + + +/***/ }), + +/***/ 2188: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getValueFromTextNode = void 0; +const getValueFromTextNode = (obj) => { + const textNodeName = "#text"; + for (const key in obj) { + if (obj.hasOwnProperty(key) && obj[key][textNodeName] !== undefined) { + obj[key] = obj[key][textNodeName]; + } + else if (typeof obj[key] === "object" && obj[key] !== null) { + obj[key] = (0, exports.getValueFromTextNode)(obj[key]); + } + } + return obj; +}; +exports.getValueFromTextNode = getValueFromTextNode; + + +/***/ }), + +/***/ 3570: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(438), exports); +tslib_1.__exportStar(__nccwpck_require__(1600), exports); +tslib_1.__exportStar(__nccwpck_require__(2813), exports); +tslib_1.__exportStar(__nccwpck_require__(5414), exports); +tslib_1.__exportStar(__nccwpck_require__(2541), exports); +tslib_1.__exportStar(__nccwpck_require__(6929), exports); +tslib_1.__exportStar(__nccwpck_require__(1737), exports); +tslib_1.__exportStar(__nccwpck_require__(9681), exports); +tslib_1.__exportStar(__nccwpck_require__(1163), exports); +tslib_1.__exportStar(__nccwpck_require__(2015), exports); +tslib_1.__exportStar(__nccwpck_require__(8074), exports); +tslib_1.__exportStar(__nccwpck_require__(6016), exports); +tslib_1.__exportStar(__nccwpck_require__(2638), exports); +tslib_1.__exportStar(__nccwpck_require__(2188), exports); +tslib_1.__exportStar(__nccwpck_require__(2964), exports); +tslib_1.__exportStar(__nccwpck_require__(3495), exports); +tslib_1.__exportStar(__nccwpck_require__(4857), exports); +tslib_1.__exportStar(__nccwpck_require__(5342), exports); +tslib_1.__exportStar(__nccwpck_require__(9796), exports); +tslib_1.__exportStar(__nccwpck_require__(1752), exports); +tslib_1.__exportStar(__nccwpck_require__(2480), exports); + + +/***/ }), + +/***/ 2964: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.LazyJsonString = exports.StringWrapper = void 0; +const StringWrapper = function () { + const Class = Object.getPrototypeOf(this).constructor; + const Constructor = Function.bind.apply(String, [null, ...arguments]); + const instance = new Constructor(); + Object.setPrototypeOf(instance, Class.prototype); + return instance; +}; +exports.StringWrapper = StringWrapper; +exports.StringWrapper.prototype = Object.create(String.prototype, { + constructor: { + value: exports.StringWrapper, + enumerable: false, + writable: true, + configurable: true, + }, +}); +Object.setPrototypeOf(exports.StringWrapper, String); +class LazyJsonString extends exports.StringWrapper { + deserializeJSON() { + return JSON.parse(super.toString()); + } + toJSON() { + return super.toString(); + } + static fromObject(object) { + if (object instanceof LazyJsonString) { + return object; + } + else if (object instanceof String || typeof object === "string") { + return new LazyJsonString(object); + } + return new LazyJsonString(JSON.stringify(object)); + } +} +exports.LazyJsonString = LazyJsonString; + + +/***/ }), + +/***/ 3495: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, 
"__esModule", ({ value: true })); +exports.take = exports.convertMap = exports.map = void 0; +function map(arg0, arg1, arg2) { + let target; + let filter; + let instructions; + if (typeof arg1 === "undefined" && typeof arg2 === "undefined") { + target = {}; + instructions = arg0; + } + else { + target = arg0; + if (typeof arg1 === "function") { + filter = arg1; + instructions = arg2; + return mapWithFilter(target, filter, instructions); + } + else { + instructions = arg1; + } + } + for (const key of Object.keys(instructions)) { + if (!Array.isArray(instructions[key])) { + target[key] = instructions[key]; + continue; + } + applyInstruction(target, null, instructions, key); + } + return target; +} +exports.map = map; +const convertMap = (target) => { + const output = {}; + for (const [k, v] of Object.entries(target || {})) { + output[k] = [, v]; + } + return output; +}; +exports.convertMap = convertMap; +const take = (source, instructions) => { + const out = {}; + for (const key in instructions) { + applyInstruction(out, source, instructions, key); + } + return out; +}; +exports.take = take; +const mapWithFilter = (target, filter, instructions) => { + return map(target, Object.entries(instructions).reduce((_instructions, [key, value]) => { + if (Array.isArray(value)) { + _instructions[key] = value; + } + else { + if (typeof value === "function") { + _instructions[key] = [filter, value()]; + } + else { + _instructions[key] = [filter, value]; + } + } + return _instructions; + }, {})); +}; +const applyInstruction = (target, source, instructions, targetKey) => { + if (source !== null) { + let instruction = instructions[targetKey]; + if (typeof instruction === "function") { + instruction = [, instruction]; + } + const [filter = nonNullish, valueFn = pass, sourceKey = targetKey] = instruction; + if ((typeof filter === "function" && filter(source[sourceKey])) || (typeof filter !== "function" && !!filter)) { + target[targetKey] = valueFn(source[sourceKey]); + } + return; + } + let [filter, value] = instructions[targetKey]; + if (typeof value === "function") { + let _value; + const defaultFilterPassed = filter === undefined && (_value = value()) != null; + const customFilterPassed = (typeof filter === "function" && !!filter(void 0)) || (typeof filter !== "function" && !!filter); + if (defaultFilterPassed) { + target[targetKey] = _value; + } + else if (customFilterPassed) { + target[targetKey] = value(); + } + } + else { + const defaultFilterPassed = filter === undefined && value != null; + const customFilterPassed = (typeof filter === "function" && !!filter(value)) || (typeof filter !== "function" && !!filter); + if (defaultFilterPassed || customFilterPassed) { + target[targetKey] = value; + } + } +}; +const nonNullish = (_) => _ != null; +const pass = (_) => _; + + +/***/ }), + +/***/ 4857: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.logger = exports.strictParseByte = exports.strictParseShort = exports.strictParseInt32 = exports.strictParseInt = exports.strictParseLong = exports.limitedParseFloat32 = exports.limitedParseFloat = exports.handleFloat = exports.limitedParseDouble = exports.strictParseFloat32 = exports.strictParseFloat = exports.strictParseDouble = exports.expectUnion = exports.expectString = exports.expectObject = exports.expectNonNull = exports.expectByte = exports.expectShort = exports.expectInt32 = exports.expectInt = exports.expectLong = exports.expectFloat32 = exports.expectNumber = 
exports.expectBoolean = exports.parseBoolean = void 0; +const parseBoolean = (value) => { + switch (value) { + case "true": + return true; + case "false": + return false; + default: + throw new Error(`Unable to parse boolean value "${value}"`); + } +}; +exports.parseBoolean = parseBoolean; +const expectBoolean = (value) => { + if (value === null || value === undefined) { + return undefined; + } + if (typeof value === "number") { + if (value === 0 || value === 1) { + exports.logger.warn(stackTraceWarning(`Expected boolean, got ${typeof value}: ${value}`)); + } + if (value === 0) { + return false; + } + if (value === 1) { + return true; + } + } + if (typeof value === "string") { + const lower = value.toLowerCase(); + if (lower === "false" || lower === "true") { + exports.logger.warn(stackTraceWarning(`Expected boolean, got ${typeof value}: ${value}`)); + } + if (lower === "false") { + return false; + } + if (lower === "true") { + return true; + } + } + if (typeof value === "boolean") { + return value; + } + throw new TypeError(`Expected boolean, got ${typeof value}: ${value}`); +}; +exports.expectBoolean = expectBoolean; +const expectNumber = (value) => { + if (value === null || value === undefined) { + return undefined; + } + if (typeof value === "string") { + const parsed = parseFloat(value); + if (!Number.isNaN(parsed)) { + if (String(parsed) !== String(value)) { + exports.logger.warn(stackTraceWarning(`Expected number but observed string: ${value}`)); + } + return parsed; + } + } + if (typeof value === "number") { + return value; + } + throw new TypeError(`Expected number, got ${typeof value}: ${value}`); +}; +exports.expectNumber = expectNumber; +const MAX_FLOAT = Math.ceil(2 ** 127 * (2 - 2 ** -23)); +const expectFloat32 = (value) => { + const expected = (0, exports.expectNumber)(value); + if (expected !== undefined && !Number.isNaN(expected) && expected !== Infinity && expected !== -Infinity) { + if (Math.abs(expected) > MAX_FLOAT) { + throw new TypeError(`Expected 32-bit float, got ${value}`); + } + } + return expected; +}; +exports.expectFloat32 = expectFloat32; +const expectLong = (value) => { + if (value === null || value === undefined) { + return undefined; + } + if (Number.isInteger(value) && !Number.isNaN(value)) { + return value; + } + throw new TypeError(`Expected integer, got ${typeof value}: ${value}`); +}; +exports.expectLong = expectLong; +exports.expectInt = exports.expectLong; +const expectInt32 = (value) => expectSizedInt(value, 32); +exports.expectInt32 = expectInt32; +const expectShort = (value) => expectSizedInt(value, 16); +exports.expectShort = expectShort; +const expectByte = (value) => expectSizedInt(value, 8); +exports.expectByte = expectByte; +const expectSizedInt = (value, size) => { + const expected = (0, exports.expectLong)(value); + if (expected !== undefined && castInt(expected, size) !== expected) { + throw new TypeError(`Expected ${size}-bit integer, got ${value}`); + } + return expected; +}; +const castInt = (value, size) => { + switch (size) { + case 32: + return Int32Array.of(value)[0]; + case 16: + return Int16Array.of(value)[0]; + case 8: + return Int8Array.of(value)[0]; + } +}; +const expectNonNull = (value, location) => { + if (value === null || value === undefined) { + if (location) { + throw new TypeError(`Expected a non-null value for ${location}`); + } + throw new TypeError("Expected a non-null value"); + } + return value; +}; +exports.expectNonNull = expectNonNull; +const expectObject = (value) => { + if (value === null || value === 
undefined) { + return undefined; + } + if (typeof value === "object" && !Array.isArray(value)) { + return value; + } + const receivedType = Array.isArray(value) ? "array" : typeof value; + throw new TypeError(`Expected object, got ${receivedType}: ${value}`); +}; +exports.expectObject = expectObject; +const expectString = (value) => { + if (value === null || value === undefined) { + return undefined; + } + if (typeof value === "string") { + return value; + } + if (["boolean", "number", "bigint"].includes(typeof value)) { + exports.logger.warn(stackTraceWarning(`Expected string, got ${typeof value}: ${value}`)); + return String(value); + } + throw new TypeError(`Expected string, got ${typeof value}: ${value}`); +}; +exports.expectString = expectString; +const expectUnion = (value) => { + if (value === null || value === undefined) { + return undefined; + } + const asObject = (0, exports.expectObject)(value); + const setKeys = Object.entries(asObject) + .filter(([, v]) => v != null) + .map(([k]) => k); + if (setKeys.length === 0) { + throw new TypeError(`Unions must have exactly one non-null member. None were found.`); + } + if (setKeys.length > 1) { + throw new TypeError(`Unions must have exactly one non-null member. Keys ${setKeys} were not null.`); + } + return asObject; +}; +exports.expectUnion = expectUnion; +const strictParseDouble = (value) => { + if (typeof value == "string") { + return (0, exports.expectNumber)(parseNumber(value)); + } + return (0, exports.expectNumber)(value); +}; +exports.strictParseDouble = strictParseDouble; +exports.strictParseFloat = exports.strictParseDouble; +const strictParseFloat32 = (value) => { + if (typeof value == "string") { + return (0, exports.expectFloat32)(parseNumber(value)); + } + return (0, exports.expectFloat32)(value); +}; +exports.strictParseFloat32 = strictParseFloat32; +const NUMBER_REGEX = /(-?(?:0|[1-9]\d*)(?:\.\d+)?(?:[eE][+-]?\d+)?)|(-?Infinity)|(NaN)/g; +const parseNumber = (value) => { + const matches = value.match(NUMBER_REGEX); + if (matches === null || matches[0].length !== value.length) { + throw new TypeError(`Expected real number, got implicit NaN`); + } + return parseFloat(value); +}; +const limitedParseDouble = (value) => { + if (typeof value == "string") { + return parseFloatString(value); + } + return (0, exports.expectNumber)(value); +}; +exports.limitedParseDouble = limitedParseDouble; +exports.handleFloat = exports.limitedParseDouble; +exports.limitedParseFloat = exports.limitedParseDouble; +const limitedParseFloat32 = (value) => { + if (typeof value == "string") { + return parseFloatString(value); + } + return (0, exports.expectFloat32)(value); +}; +exports.limitedParseFloat32 = limitedParseFloat32; +const parseFloatString = (value) => { + switch (value) { + case "NaN": + return NaN; + case "Infinity": + return Infinity; + case "-Infinity": + return -Infinity; + default: + throw new Error(`Unable to parse float value: ${value}`); + } +}; +const strictParseLong = (value) => { + if (typeof value === "string") { + return (0, exports.expectLong)(parseNumber(value)); + } + return (0, exports.expectLong)(value); +}; +exports.strictParseLong = strictParseLong; +exports.strictParseInt = exports.strictParseLong; +const strictParseInt32 = (value) => { + if (typeof value === "string") { + return (0, exports.expectInt32)(parseNumber(value)); + } + return (0, exports.expectInt32)(value); +}; +exports.strictParseInt32 = strictParseInt32; +const strictParseShort = (value) => { + if (typeof value === "string") { + return (0, 
exports.expectShort)(parseNumber(value)); + } + return (0, exports.expectShort)(value); +}; +exports.strictParseShort = strictParseShort; +const strictParseByte = (value) => { + if (typeof value === "string") { + return (0, exports.expectByte)(parseNumber(value)); + } + return (0, exports.expectByte)(value); +}; +exports.strictParseByte = strictParseByte; +const stackTraceWarning = (message) => { + return String(new TypeError(message).stack || message) + .split("\n") + .slice(0, 5) + .filter((s) => !s.includes("stackTraceWarning")) + .join("\n"); +}; +exports.logger = { + warn: console.warn, +}; + + +/***/ }), + +/***/ 5342: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolvedPath = void 0; +const extended_encode_uri_component_1 = __nccwpck_require__(6016); +const resolvedPath = (resolvedPath, input, memberName, labelValueProvider, uriLabel, isGreedyLabel) => { + if (input != null && input[memberName] !== undefined) { + const labelValue = labelValueProvider(); + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: " + memberName + "."); + } + resolvedPath = resolvedPath.replace(uriLabel, isGreedyLabel + ? labelValue + .split("/") + .map((segment) => (0, extended_encode_uri_component_1.extendedEncodeURIComponent)(segment)) + .join("/") + : (0, extended_encode_uri_component_1.extendedEncodeURIComponent)(labelValue)); + } + else { + throw new Error("No value provided for input HTTP label: " + memberName + "."); + } + return resolvedPath; +}; +exports.resolvedPath = resolvedPath; + + +/***/ }), + +/***/ 9796: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.serializeFloat = void 0; +const serializeFloat = (value) => { + if (value !== value) { + return "NaN"; + } + switch (value) { + case Infinity: + return "Infinity"; + case -Infinity: + return "-Infinity"; + default: + return value; + } +}; +exports.serializeFloat = serializeFloat; + + +/***/ }), + +/***/ 1752: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports._json = void 0; +const _json = (obj) => { + if (obj == null) { + return {}; + } + if (Array.isArray(obj)) { + return obj.filter((_) => _ != null); + } + if (typeof obj === "object") { + const target = {}; + for (const key of Object.keys(obj)) { + if (obj[key] == null) { + continue; + } + target[key] = (0, exports._json)(obj[key]); + } + return target; + } + return obj; +}; +exports._json = _json; + + +/***/ }), + +/***/ 2480: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.splitEvery = void 0; +function splitEvery(value, delimiter, numDelimiters) { + if (numDelimiters <= 0 || !Number.isInteger(numDelimiters)) { + throw new Error("Invalid number of delimiters (" + numDelimiters + ") for splitEvery."); + } + const segments = value.split(delimiter); + if (numDelimiters === 1) { + return segments; + } + const compoundSegments = []; + let currentSegment = ""; + for (let i = 0; i < segments.length; i++) { + if (currentSegment === "") { + currentSegment = segments[i]; + } + else { + currentSegment += delimiter + segments[i]; + } + if ((i + 1) % numDelimiters === 0) { + compoundSegments.push(currentSegment); + currentSegment = ""; + } + } + if (currentSegment !== 
"") { + compoundSegments.push(currentSegment); + } + return compoundSegments; +} +exports.splitEvery = splitEvery; + + +/***/ }), + +/***/ 4075: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 8960: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.HttpAuthLocation = void 0; +var HttpAuthLocation; +(function (HttpAuthLocation) { + HttpAuthLocation["HEADER"] = "header"; + HttpAuthLocation["QUERY"] = "query"; +})(HttpAuthLocation = exports.HttpAuthLocation || (exports.HttpAuthLocation = {})); + + +/***/ }), + +/***/ 3274: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 8340: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 4744: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 8270: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 9580: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 7628: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(9580), exports); +tslib_1.__exportStar(__nccwpck_require__(8398), exports); +tslib_1.__exportStar(__nccwpck_require__(6522), exports); + + +/***/ }), + +/***/ 8398: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 6522: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 9035: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 7225: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 4126: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.EndpointURLScheme = void 0; +var EndpointURLScheme; +(function (EndpointURLScheme) { + EndpointURLScheme["HTTP"] = "http"; + EndpointURLScheme["HTTPS"] = "https"; +})(EndpointURLScheme = exports.EndpointURLScheme || (exports.EndpointURLScheme = {})); + + +/***/ }), + +/***/ 5612: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 3084: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 9843: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 3799: +/***/ ((__unused_webpack_module, exports) 
=> { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 1550: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(5612), exports); +tslib_1.__exportStar(__nccwpck_require__(3084), exports); +tslib_1.__exportStar(__nccwpck_require__(9843), exports); +tslib_1.__exportStar(__nccwpck_require__(7658), exports); +tslib_1.__exportStar(__nccwpck_require__(3799), exports); + + +/***/ }), + +/***/ 7658: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 8508: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 8947: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveChecksumRuntimeConfig = exports.getChecksumConfiguration = exports.AlgorithmId = void 0; +var AlgorithmId; +(function (AlgorithmId) { + AlgorithmId["MD5"] = "md5"; + AlgorithmId["CRC32"] = "crc32"; + AlgorithmId["CRC32C"] = "crc32c"; + AlgorithmId["SHA1"] = "sha1"; + AlgorithmId["SHA256"] = "sha256"; +})(AlgorithmId = exports.AlgorithmId || (exports.AlgorithmId = {})); +const getChecksumConfiguration = (runtimeConfig) => { + const checksumAlgorithms = []; + if (runtimeConfig.sha256 !== undefined) { + checksumAlgorithms.push({ + algorithmId: () => AlgorithmId.SHA256, + checksumConstructor: () => runtimeConfig.sha256, + }); + } + if (runtimeConfig.md5 != undefined) { + checksumAlgorithms.push({ + algorithmId: () => AlgorithmId.MD5, + checksumConstructor: () => runtimeConfig.md5, + }); + } + return { + _checksumAlgorithms: checksumAlgorithms, + addChecksumAlgorithm(algo) { + this._checksumAlgorithms.push(algo); + }, + checksumAlgorithms() { + return this._checksumAlgorithms; + }, + }; +}; +exports.getChecksumConfiguration = getChecksumConfiguration; +const resolveChecksumRuntimeConfig = (clientConfig) => { + const runtimeConfig = {}; + clientConfig.checksumAlgorithms().forEach((checksumAlgorithm) => { + runtimeConfig[checksumAlgorithm.algorithmId()] = checksumAlgorithm.checksumConstructor(); + }); + return runtimeConfig; +}; +exports.resolveChecksumRuntimeConfig = resolveChecksumRuntimeConfig; + + +/***/ }), + +/***/ 9169: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveDefaultRuntimeConfig = exports.getDefaultClientConfiguration = void 0; +const checksum_1 = __nccwpck_require__(8947); +const getDefaultClientConfiguration = (runtimeConfig) => { + return { + ...(0, checksum_1.getChecksumConfiguration)(runtimeConfig), + }; +}; +exports.getDefaultClientConfiguration = getDefaultClientConfiguration; +const resolveDefaultRuntimeConfig = (config) => { + return { + ...(0, checksum_1.resolveChecksumRuntimeConfig)(config), + }; +}; +exports.resolveDefaultRuntimeConfig = resolveDefaultRuntimeConfig; + + +/***/ }), + +/***/ 7447: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(9169), exports); + + 
+/***/ }), + +/***/ 8883: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.FieldPosition = void 0; +var FieldPosition; +(function (FieldPosition) { + FieldPosition[FieldPosition["HEADER"] = 0] = "HEADER"; + FieldPosition[FieldPosition["TRAILER"] = 1] = "TRAILER"; +})(FieldPosition = exports.FieldPosition || (exports.FieldPosition = {})); + + +/***/ }), + +/***/ 7545: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 9123: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 8006: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(7545), exports); +tslib_1.__exportStar(__nccwpck_require__(9123), exports); + + +/***/ }), + +/***/ 5756: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(4075), exports); +tslib_1.__exportStar(__nccwpck_require__(8960), exports); +tslib_1.__exportStar(__nccwpck_require__(3274), exports); +tslib_1.__exportStar(__nccwpck_require__(8340), exports); +tslib_1.__exportStar(__nccwpck_require__(4744), exports); +tslib_1.__exportStar(__nccwpck_require__(8270), exports); +tslib_1.__exportStar(__nccwpck_require__(7628), exports); +tslib_1.__exportStar(__nccwpck_require__(9035), exports); +tslib_1.__exportStar(__nccwpck_require__(7225), exports); +tslib_1.__exportStar(__nccwpck_require__(4126), exports); +tslib_1.__exportStar(__nccwpck_require__(1550), exports); +tslib_1.__exportStar(__nccwpck_require__(8508), exports); +tslib_1.__exportStar(__nccwpck_require__(7447), exports); +tslib_1.__exportStar(__nccwpck_require__(8883), exports); +tslib_1.__exportStar(__nccwpck_require__(8006), exports); +tslib_1.__exportStar(__nccwpck_require__(2866), exports); +tslib_1.__exportStar(__nccwpck_require__(7756), exports); +tslib_1.__exportStar(__nccwpck_require__(5489), exports); +tslib_1.__exportStar(__nccwpck_require__(6524), exports); +tslib_1.__exportStar(__nccwpck_require__(4603), exports); +tslib_1.__exportStar(__nccwpck_require__(3752), exports); +tslib_1.__exportStar(__nccwpck_require__(774), exports); +tslib_1.__exportStar(__nccwpck_require__(4089), exports); +tslib_1.__exportStar(__nccwpck_require__(5678), exports); +tslib_1.__exportStar(__nccwpck_require__(9926), exports); +tslib_1.__exportStar(__nccwpck_require__(9945), exports); +tslib_1.__exportStar(__nccwpck_require__(8564), exports); +tslib_1.__exportStar(__nccwpck_require__(1285), exports); +tslib_1.__exportStar(__nccwpck_require__(364), exports); +tslib_1.__exportStar(__nccwpck_require__(9304), exports); +tslib_1.__exportStar(__nccwpck_require__(375), exports); +tslib_1.__exportStar(__nccwpck_require__(6894), exports); +tslib_1.__exportStar(__nccwpck_require__(7887), exports); +tslib_1.__exportStar(__nccwpck_require__(7544), exports); + + +/***/ }), + +/***/ 2866: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 7756: +/***/ 
((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 5489: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 6524: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 4603: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 3752: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 774: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 4089: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 5678: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 9926: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 9945: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 8564: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 1285: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 364: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.RequestHandlerProtocol = void 0; +var RequestHandlerProtocol; +(function (RequestHandlerProtocol) { + RequestHandlerProtocol["HTTP_0_9"] = "http/0.9"; + RequestHandlerProtocol["HTTP_1_0"] = "http/1.0"; + RequestHandlerProtocol["TDS_8_0"] = "tds/8.0"; +})(RequestHandlerProtocol = exports.RequestHandlerProtocol || (exports.RequestHandlerProtocol = {})); + + +/***/ }), + +/***/ 9304: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 375: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 6894: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 7887: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 7544: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 4681: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.parseUrl = void 0; +const querystring_parser_1 = __nccwpck_require__(4769); +const parseUrl = (url) => { 
+ if (typeof url === "string") { + return (0, exports.parseUrl)(new URL(url)); + } + const { hostname, pathname, port, protocol, search } = url; + let query; + if (search) { + query = (0, querystring_parser_1.parseQueryString)(search); + } + return { + hostname, + port: port ? parseInt(port) : undefined, + protocol, + path: pathname, + query, + }; +}; +exports.parseUrl = parseUrl; + + +/***/ }), + +/***/ 305: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.fromBase64 = void 0; +const util_buffer_from_1 = __nccwpck_require__(1381); +const BASE64_REGEX = /^[A-Za-z0-9+/]*={0,2}$/; +const fromBase64 = (input) => { + if ((input.length * 3) % 4 !== 0) { + throw new TypeError(`Incorrect padding on base64 string.`); + } + if (!BASE64_REGEX.exec(input)) { + throw new TypeError(`Invalid base64 string.`); + } + const buffer = (0, util_buffer_from_1.fromString)(input, "base64"); + return new Uint8Array(buffer.buffer, buffer.byteOffset, buffer.byteLength); +}; +exports.fromBase64 = fromBase64; + + +/***/ }), + +/***/ 5600: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(305), exports); +tslib_1.__exportStar(__nccwpck_require__(4730), exports); + + +/***/ }), + +/***/ 4730: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.toBase64 = void 0; +const util_buffer_from_1 = __nccwpck_require__(1381); +const toBase64 = (input) => (0, util_buffer_from_1.fromArrayBuffer)(input.buffer, input.byteOffset, input.byteLength).toString("base64"); +exports.toBase64 = toBase64; + + +/***/ }), + +/***/ 4880: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.calculateBodyLength = void 0; +const fs_1 = __nccwpck_require__(7147); +const calculateBodyLength = (body) => { + if (!body) { + return 0; + } + if (typeof body === "string") { + return Buffer.from(body).length; + } + else if (typeof body.byteLength === "number") { + return body.byteLength; + } + else if (typeof body.size === "number") { + return body.size; + } + else if (typeof body.path === "string" || Buffer.isBuffer(body.path)) { + return (0, fs_1.lstatSync)(body.path).size; + } + else if (typeof body.fd === "number") { + return (0, fs_1.fstatSync)(body.fd).size; + } + throw new Error(`Body Length computation failed for ${body}`); +}; +exports.calculateBodyLength = calculateBodyLength; + + +/***/ }), + +/***/ 8075: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(4880), exports); + + +/***/ }), + +/***/ 1381: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.fromString = exports.fromArrayBuffer = void 0; +const is_array_buffer_1 = __nccwpck_require__(780); +const buffer_1 = __nccwpck_require__(4300); +const fromArrayBuffer = (input, offset = 0, length = input.byteLength - offset) => { + if (!(0, is_array_buffer_1.isArrayBuffer)(input)) { + 
throw new TypeError(`The "input" argument must be ArrayBuffer. Received type ${typeof input} (${input})`); + } + return buffer_1.Buffer.from(input, offset, length); +}; +exports.fromArrayBuffer = fromArrayBuffer; +const fromString = (input, encoding) => { + if (typeof input !== "string") { + throw new TypeError(`The "input" argument must be of type string. Received type ${typeof input} (${input})`); + } + return encoding ? buffer_1.Buffer.from(input, encoding) : buffer_1.Buffer.from(input); +}; +exports.fromString = fromString; + + +/***/ }), + +/***/ 2491: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.booleanSelector = exports.SelectorType = void 0; +var SelectorType; +(function (SelectorType) { + SelectorType["ENV"] = "env"; + SelectorType["CONFIG"] = "shared config entry"; +})(SelectorType = exports.SelectorType || (exports.SelectorType = {})); +const booleanSelector = (obj, key, type) => { + if (!(key in obj)) + return undefined; + if (obj[key] === "true") + return true; + if (obj[key] === "false") + return false; + throw new Error(`Cannot load ${type} "${key}". Expected "true" or "false", got ${obj[key]}.`); +}; +exports.booleanSelector = booleanSelector; + + +/***/ }), + +/***/ 3375: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(2491), exports); + + +/***/ }), + +/***/ 6470: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.IMDS_REGION_PATH = exports.DEFAULTS_MODE_OPTIONS = exports.ENV_IMDS_DISABLED = exports.AWS_DEFAULT_REGION_ENV = exports.AWS_REGION_ENV = exports.AWS_EXECUTION_ENV = void 0; +exports.AWS_EXECUTION_ENV = "AWS_EXECUTION_ENV"; +exports.AWS_REGION_ENV = "AWS_REGION"; +exports.AWS_DEFAULT_REGION_ENV = "AWS_DEFAULT_REGION"; +exports.ENV_IMDS_DISABLED = "AWS_EC2_METADATA_DISABLED"; +exports.DEFAULTS_MODE_OPTIONS = ["in-region", "cross-region", "mobile", "standard", "legacy"]; +exports.IMDS_REGION_PATH = "/latest/meta-data/placement/region"; + + +/***/ }), + +/***/ 5577: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.NODE_DEFAULTS_MODE_CONFIG_OPTIONS = void 0; +const AWS_DEFAULTS_MODE_ENV = "AWS_DEFAULTS_MODE"; +const AWS_DEFAULTS_MODE_CONFIG = "defaults_mode"; +exports.NODE_DEFAULTS_MODE_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => { + return env[AWS_DEFAULTS_MODE_ENV]; + }, + configFileSelector: (profile) => { + return profile[AWS_DEFAULTS_MODE_CONFIG]; + }, + default: "legacy", +}; + + +/***/ }), + +/***/ 2429: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(6217), exports); + + +/***/ }), + +/***/ 6217: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveDefaultsModeConfig = void 0; +const config_resolver_1 = __nccwpck_require__(3098); +const credential_provider_imds_1 = __nccwpck_require__(7477); +const node_config_provider_1 = __nccwpck_require__(3461); +const property_provider_1 = 
__nccwpck_require__(9721); +const constants_1 = __nccwpck_require__(6470); +const defaultsModeConfig_1 = __nccwpck_require__(5577); +const resolveDefaultsModeConfig = ({ region = (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_REGION_CONFIG_OPTIONS), defaultsMode = (0, node_config_provider_1.loadConfig)(defaultsModeConfig_1.NODE_DEFAULTS_MODE_CONFIG_OPTIONS), } = {}) => (0, property_provider_1.memoize)(async () => { + const mode = typeof defaultsMode === "function" ? await defaultsMode() : defaultsMode; + switch (mode === null || mode === void 0 ? void 0 : mode.toLowerCase()) { + case "auto": + return resolveNodeDefaultsModeAuto(region); + case "in-region": + case "cross-region": + case "mobile": + case "standard": + case "legacy": + return Promise.resolve(mode === null || mode === void 0 ? void 0 : mode.toLocaleLowerCase()); + case undefined: + return Promise.resolve("legacy"); + default: + throw new Error(`Invalid parameter for "defaultsMode", expect ${constants_1.DEFAULTS_MODE_OPTIONS.join(", ")}, got ${mode}`); + } +}); +exports.resolveDefaultsModeConfig = resolveDefaultsModeConfig; +const resolveNodeDefaultsModeAuto = async (clientRegion) => { + if (clientRegion) { + const resolvedRegion = typeof clientRegion === "function" ? await clientRegion() : clientRegion; + const inferredRegion = await inferPhysicalRegion(); + if (!inferredRegion) { + return "standard"; + } + if (resolvedRegion === inferredRegion) { + return "in-region"; + } + else { + return "cross-region"; + } + } + return "standard"; +}; +const inferPhysicalRegion = async () => { + var _a; + if (process.env[constants_1.AWS_EXECUTION_ENV] && (process.env[constants_1.AWS_REGION_ENV] || process.env[constants_1.AWS_DEFAULT_REGION_ENV])) { + return (_a = process.env[constants_1.AWS_REGION_ENV]) !== null && _a !== void 0 ? 
_a : process.env[constants_1.AWS_DEFAULT_REGION_ENV]; + } + if (!process.env[constants_1.ENV_IMDS_DISABLED]) { + try { + const endpoint = await (0, credential_provider_imds_1.getInstanceMetadataEndpoint)(); + return (await (0, credential_provider_imds_1.httpRequest)({ ...endpoint, path: constants_1.IMDS_REGION_PATH })).toString(); + } + catch (e) { + } + } +}; + + +/***/ }), + +/***/ 5364: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.toHex = exports.fromHex = void 0; +const SHORT_TO_HEX = {}; +const HEX_TO_SHORT = {}; +for (let i = 0; i < 256; i++) { + let encodedByte = i.toString(16).toLowerCase(); + if (encodedByte.length === 1) { + encodedByte = `0${encodedByte}`; + } + SHORT_TO_HEX[i] = encodedByte; + HEX_TO_SHORT[encodedByte] = i; +} +function fromHex(encoded) { + if (encoded.length % 2 !== 0) { + throw new Error("Hex encoded strings must have an even number length"); + } + const out = new Uint8Array(encoded.length / 2); + for (let i = 0; i < encoded.length; i += 2) { + const encodedByte = encoded.slice(i, i + 2).toLowerCase(); + if (encodedByte in HEX_TO_SHORT) { + out[i / 2] = HEX_TO_SHORT[encodedByte]; + } + else { + throw new Error(`Cannot decode unrecognized sequence ${encodedByte} as hexadecimal`); + } + } + return out; +} +exports.fromHex = fromHex; +function toHex(bytes) { + let out = ""; + for (let i = 0; i < bytes.byteLength; i++) { + out += SHORT_TO_HEX[bytes[i]]; + } + return out; +} +exports.toHex = toHex; + + +/***/ }), + +/***/ 2390: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(149), exports); + + +/***/ }), + +/***/ 149: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.normalizeProvider = void 0; +const normalizeProvider = (input) => { + if (typeof input === "function") + return input; + const promisified = Promise.resolve(input); + return () => promisified; +}; +exports.normalizeProvider = normalizeProvider; + + +/***/ }), + +/***/ 5053: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AdaptiveRetryStrategy = void 0; +const config_1 = __nccwpck_require__(3435); +const DefaultRateLimiter_1 = __nccwpck_require__(2234); +const StandardRetryStrategy_1 = __nccwpck_require__(8361); +class AdaptiveRetryStrategy { + constructor(maxAttemptsProvider, options) { + this.maxAttemptsProvider = maxAttemptsProvider; + this.mode = config_1.RETRY_MODES.ADAPTIVE; + const { rateLimiter } = options !== null && options !== void 0 ? options : {}; + this.rateLimiter = rateLimiter !== null && rateLimiter !== void 0 ? 
rateLimiter : new DefaultRateLimiter_1.DefaultRateLimiter(); + this.standardRetryStrategy = new StandardRetryStrategy_1.StandardRetryStrategy(maxAttemptsProvider); + } + async acquireInitialRetryToken(retryTokenScope) { + await this.rateLimiter.getSendToken(); + return this.standardRetryStrategy.acquireInitialRetryToken(retryTokenScope); + } + async refreshRetryTokenForRetry(tokenToRenew, errorInfo) { + this.rateLimiter.updateClientSendingRate(errorInfo); + return this.standardRetryStrategy.refreshRetryTokenForRetry(tokenToRenew, errorInfo); + } + recordSuccess(token) { + this.rateLimiter.updateClientSendingRate({}); + this.standardRetryStrategy.recordSuccess(token); + } +} +exports.AdaptiveRetryStrategy = AdaptiveRetryStrategy; + + +/***/ }), + +/***/ 5689: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ConfiguredRetryStrategy = void 0; +const constants_1 = __nccwpck_require__(6302); +const StandardRetryStrategy_1 = __nccwpck_require__(8361); +class ConfiguredRetryStrategy extends StandardRetryStrategy_1.StandardRetryStrategy { + constructor(maxAttempts, computeNextBackoffDelay = constants_1.DEFAULT_RETRY_DELAY_BASE) { + super(typeof maxAttempts === "function" ? maxAttempts : async () => maxAttempts); + if (typeof computeNextBackoffDelay === "number") { + this.computeNextBackoffDelay = () => computeNextBackoffDelay; + } + else { + this.computeNextBackoffDelay = computeNextBackoffDelay; + } + } + async refreshRetryTokenForRetry(tokenToRenew, errorInfo) { + const token = await super.refreshRetryTokenForRetry(tokenToRenew, errorInfo); + token.getRetryDelay = () => this.computeNextBackoffDelay(token.getRetryCount()); + return token; + } +} +exports.ConfiguredRetryStrategy = ConfiguredRetryStrategy; + + +/***/ }), + +/***/ 2234: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.DefaultRateLimiter = void 0; +const service_error_classification_1 = __nccwpck_require__(6375); +class DefaultRateLimiter { + constructor(options) { + var _a, _b, _c, _d, _e; + this.currentCapacity = 0; + this.enabled = false; + this.lastMaxRate = 0; + this.measuredTxRate = 0; + this.requestCount = 0; + this.lastTimestamp = 0; + this.timeWindow = 0; + this.beta = (_a = options === null || options === void 0 ? void 0 : options.beta) !== null && _a !== void 0 ? _a : 0.7; + this.minCapacity = (_b = options === null || options === void 0 ? void 0 : options.minCapacity) !== null && _b !== void 0 ? _b : 1; + this.minFillRate = (_c = options === null || options === void 0 ? void 0 : options.minFillRate) !== null && _c !== void 0 ? _c : 0.5; + this.scaleConstant = (_d = options === null || options === void 0 ? void 0 : options.scaleConstant) !== null && _d !== void 0 ? _d : 0.4; + this.smooth = (_e = options === null || options === void 0 ? void 0 : options.smooth) !== null && _e !== void 0 ? 
_e : 0.8; + const currentTimeInSeconds = this.getCurrentTimeInSeconds(); + this.lastThrottleTime = currentTimeInSeconds; + this.lastTxRateBucket = Math.floor(this.getCurrentTimeInSeconds()); + this.fillRate = this.minFillRate; + this.maxCapacity = this.minCapacity; + } + getCurrentTimeInSeconds() { + return Date.now() / 1000; + } + async getSendToken() { + return this.acquireTokenBucket(1); + } + async acquireTokenBucket(amount) { + if (!this.enabled) { + return; + } + this.refillTokenBucket(); + if (amount > this.currentCapacity) { + const delay = ((amount - this.currentCapacity) / this.fillRate) * 1000; + await new Promise((resolve) => setTimeout(resolve, delay)); + } + this.currentCapacity = this.currentCapacity - amount; + } + refillTokenBucket() { + const timestamp = this.getCurrentTimeInSeconds(); + if (!this.lastTimestamp) { + this.lastTimestamp = timestamp; + return; + } + const fillAmount = (timestamp - this.lastTimestamp) * this.fillRate; + this.currentCapacity = Math.min(this.maxCapacity, this.currentCapacity + fillAmount); + this.lastTimestamp = timestamp; + } + updateClientSendingRate(response) { + let calculatedRate; + this.updateMeasuredRate(); + if ((0, service_error_classification_1.isThrottlingError)(response)) { + const rateToUse = !this.enabled ? this.measuredTxRate : Math.min(this.measuredTxRate, this.fillRate); + this.lastMaxRate = rateToUse; + this.calculateTimeWindow(); + this.lastThrottleTime = this.getCurrentTimeInSeconds(); + calculatedRate = this.cubicThrottle(rateToUse); + this.enableTokenBucket(); + } + else { + this.calculateTimeWindow(); + calculatedRate = this.cubicSuccess(this.getCurrentTimeInSeconds()); + } + const newRate = Math.min(calculatedRate, 2 * this.measuredTxRate); + this.updateTokenBucketRate(newRate); + } + calculateTimeWindow() { + this.timeWindow = this.getPrecise(Math.pow((this.lastMaxRate * (1 - this.beta)) / this.scaleConstant, 1 / 3)); + } + cubicThrottle(rateToUse) { + return this.getPrecise(rateToUse * this.beta); + } + cubicSuccess(timestamp) { + return this.getPrecise(this.scaleConstant * Math.pow(timestamp - this.lastThrottleTime - this.timeWindow, 3) + this.lastMaxRate); + } + enableTokenBucket() { + this.enabled = true; + } + updateTokenBucketRate(newRate) { + this.refillTokenBucket(); + this.fillRate = Math.max(newRate, this.minFillRate); + this.maxCapacity = Math.max(newRate, this.minCapacity); + this.currentCapacity = Math.min(this.currentCapacity, this.maxCapacity); + } + updateMeasuredRate() { + const t = this.getCurrentTimeInSeconds(); + const timeBucket = Math.floor(t * 2) / 2; + this.requestCount++; + if (timeBucket > this.lastTxRateBucket) { + const currentRate = this.requestCount / (timeBucket - this.lastTxRateBucket); + this.measuredTxRate = this.getPrecise(currentRate * this.smooth + this.measuredTxRate * (1 - this.smooth)); + this.requestCount = 0; + this.lastTxRateBucket = timeBucket; + } + } + getPrecise(num) { + return parseFloat(num.toFixed(8)); + } +} +exports.DefaultRateLimiter = DefaultRateLimiter; + + +/***/ }), + +/***/ 8361: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.StandardRetryStrategy = void 0; +const config_1 = __nccwpck_require__(3435); +const constants_1 = __nccwpck_require__(6302); +const defaultRetryBackoffStrategy_1 = __nccwpck_require__(1337); +const defaultRetryToken_1 = __nccwpck_require__(1127); +class StandardRetryStrategy { + constructor(maxAttempts) { + this.maxAttempts = 
maxAttempts; + this.mode = config_1.RETRY_MODES.STANDARD; + this.capacity = constants_1.INITIAL_RETRY_TOKENS; + this.retryBackoffStrategy = (0, defaultRetryBackoffStrategy_1.getDefaultRetryBackoffStrategy)(); + this.maxAttemptsProvider = typeof maxAttempts === "function" ? maxAttempts : async () => maxAttempts; + } + async acquireInitialRetryToken(retryTokenScope) { + return (0, defaultRetryToken_1.createDefaultRetryToken)({ + retryDelay: constants_1.DEFAULT_RETRY_DELAY_BASE, + retryCount: 0, + }); + } + async refreshRetryTokenForRetry(token, errorInfo) { + const maxAttempts = await this.getMaxAttempts(); + if (this.shouldRetry(token, errorInfo, maxAttempts)) { + const errorType = errorInfo.errorType; + this.retryBackoffStrategy.setDelayBase(errorType === "THROTTLING" ? constants_1.THROTTLING_RETRY_DELAY_BASE : constants_1.DEFAULT_RETRY_DELAY_BASE); + const delayFromErrorType = this.retryBackoffStrategy.computeNextBackoffDelay(token.getRetryCount()); + const retryDelay = errorInfo.retryAfterHint + ? Math.max(errorInfo.retryAfterHint.getTime() - Date.now() || 0, delayFromErrorType) + : delayFromErrorType; + const capacityCost = this.getCapacityCost(errorType); + this.capacity -= capacityCost; + return (0, defaultRetryToken_1.createDefaultRetryToken)({ + retryDelay, + retryCount: token.getRetryCount() + 1, + retryCost: capacityCost, + }); + } + throw new Error("No retry token available"); + } + recordSuccess(token) { + var _a; + this.capacity = Math.max(constants_1.INITIAL_RETRY_TOKENS, this.capacity + ((_a = token.getRetryCost()) !== null && _a !== void 0 ? _a : constants_1.NO_RETRY_INCREMENT)); + } + getCapacity() { + return this.capacity; + } + async getMaxAttempts() { + try { + return await this.maxAttemptsProvider(); + } + catch (error) { + console.warn(`Max attempts provider could not resolve. Using default of ${config_1.DEFAULT_MAX_ATTEMPTS}`); + return config_1.DEFAULT_MAX_ATTEMPTS; + } + } + shouldRetry(tokenToRenew, errorInfo, maxAttempts) { + const attempts = tokenToRenew.getRetryCount() + 1; + return (attempts < maxAttempts && + this.capacity >= this.getCapacityCost(errorInfo.errorType) && + this.isRetryableError(errorInfo.errorType)); + } + getCapacityCost(errorType) { + return errorType === "TRANSIENT" ? 
constants_1.TIMEOUT_RETRY_COST : constants_1.RETRY_COST; + } + isRetryableError(errorType) { + return errorType === "THROTTLING" || errorType === "TRANSIENT"; + } +} +exports.StandardRetryStrategy = StandardRetryStrategy; + + +/***/ }), + +/***/ 3435: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.DEFAULT_RETRY_MODE = exports.DEFAULT_MAX_ATTEMPTS = exports.RETRY_MODES = void 0; +var RETRY_MODES; +(function (RETRY_MODES) { + RETRY_MODES["STANDARD"] = "standard"; + RETRY_MODES["ADAPTIVE"] = "adaptive"; +})(RETRY_MODES = exports.RETRY_MODES || (exports.RETRY_MODES = {})); +exports.DEFAULT_MAX_ATTEMPTS = 3; +exports.DEFAULT_RETRY_MODE = RETRY_MODES.STANDARD; + + +/***/ }), + +/***/ 6302: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.REQUEST_HEADER = exports.INVOCATION_ID_HEADER = exports.NO_RETRY_INCREMENT = exports.TIMEOUT_RETRY_COST = exports.RETRY_COST = exports.INITIAL_RETRY_TOKENS = exports.THROTTLING_RETRY_DELAY_BASE = exports.MAXIMUM_RETRY_DELAY = exports.DEFAULT_RETRY_DELAY_BASE = void 0; +exports.DEFAULT_RETRY_DELAY_BASE = 100; +exports.MAXIMUM_RETRY_DELAY = 20 * 1000; +exports.THROTTLING_RETRY_DELAY_BASE = 500; +exports.INITIAL_RETRY_TOKENS = 500; +exports.RETRY_COST = 5; +exports.TIMEOUT_RETRY_COST = 10; +exports.NO_RETRY_INCREMENT = 1; +exports.INVOCATION_ID_HEADER = "amz-sdk-invocation-id"; +exports.REQUEST_HEADER = "amz-sdk-request"; + + +/***/ }), + +/***/ 1337: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getDefaultRetryBackoffStrategy = void 0; +const constants_1 = __nccwpck_require__(6302); +const getDefaultRetryBackoffStrategy = () => { + let delayBase = constants_1.DEFAULT_RETRY_DELAY_BASE; + const computeNextBackoffDelay = (attempts) => { + return Math.floor(Math.min(constants_1.MAXIMUM_RETRY_DELAY, Math.random() * 2 ** attempts * delayBase)); + }; + const setDelayBase = (delay) => { + delayBase = delay; + }; + return { + computeNextBackoffDelay, + setDelayBase, + }; +}; +exports.getDefaultRetryBackoffStrategy = getDefaultRetryBackoffStrategy; + + +/***/ }), + +/***/ 1127: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createDefaultRetryToken = void 0; +const constants_1 = __nccwpck_require__(6302); +const createDefaultRetryToken = ({ retryDelay, retryCount, retryCost, }) => { + const getRetryCount = () => retryCount; + const getRetryDelay = () => Math.min(constants_1.MAXIMUM_RETRY_DELAY, retryDelay); + const getRetryCost = () => retryCost; + return { + getRetryCount, + getRetryDelay, + getRetryCost, + }; +}; +exports.createDefaultRetryToken = createDefaultRetryToken; + + +/***/ }), + +/***/ 4902: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(5053), exports); +tslib_1.__exportStar(__nccwpck_require__(5689), exports); +tslib_1.__exportStar(__nccwpck_require__(2234), exports); +tslib_1.__exportStar(__nccwpck_require__(8361), exports); +tslib_1.__exportStar(__nccwpck_require__(3435), exports); +tslib_1.__exportStar(__nccwpck_require__(6302), exports); 
+tslib_1.__exportStar(__nccwpck_require__(5427), exports); + + +/***/ }), + +/***/ 5427: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 2094: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Uint8ArrayBlobAdapter = void 0; +const transforms_1 = __nccwpck_require__(2098); +class Uint8ArrayBlobAdapter extends Uint8Array { + static fromString(source, encoding = "utf-8") { + switch (typeof source) { + case "string": + return (0, transforms_1.transformFromString)(source, encoding); + default: + throw new Error(`Unsupported conversion from ${typeof source} to Uint8ArrayBlobAdapter.`); + } + } + static mutate(source) { + Object.setPrototypeOf(source, Uint8ArrayBlobAdapter.prototype); + return source; + } + transformToString(encoding = "utf-8") { + return (0, transforms_1.transformToString)(this, encoding); + } +} +exports.Uint8ArrayBlobAdapter = Uint8ArrayBlobAdapter; + + +/***/ }), + +/***/ 2098: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.transformFromString = exports.transformToString = void 0; +const util_base64_1 = __nccwpck_require__(5600); +const util_utf8_1 = __nccwpck_require__(1895); +const Uint8ArrayBlobAdapter_1 = __nccwpck_require__(2094); +function transformToString(payload, encoding = "utf-8") { + if (encoding === "base64") { + return (0, util_base64_1.toBase64)(payload); + } + return (0, util_utf8_1.toUtf8)(payload); +} +exports.transformToString = transformToString; +function transformFromString(str, encoding) { + if (encoding === "base64") { + return Uint8ArrayBlobAdapter_1.Uint8ArrayBlobAdapter.mutate((0, util_base64_1.fromBase64)(str)); + } + return Uint8ArrayBlobAdapter_1.Uint8ArrayBlobAdapter.mutate((0, util_utf8_1.fromUtf8)(str)); +} +exports.transformFromString = transformFromString; + + +/***/ }), + +/***/ 3636: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getAwsChunkedEncodingStream = void 0; +const stream_1 = __nccwpck_require__(2781); +const getAwsChunkedEncodingStream = (readableStream, options) => { + const { base64Encoder, bodyLengthChecker, checksumAlgorithmFn, checksumLocationName, streamHasher } = options; + const checksumRequired = base64Encoder !== undefined && + checksumAlgorithmFn !== undefined && + checksumLocationName !== undefined && + streamHasher !== undefined; + const digest = checksumRequired ? 
streamHasher(checksumAlgorithmFn, readableStream) : undefined; + const awsChunkedEncodingStream = new stream_1.Readable({ read: () => { } }); + readableStream.on("data", (data) => { + const length = bodyLengthChecker(data) || 0; + awsChunkedEncodingStream.push(`${length.toString(16)}\r\n`); + awsChunkedEncodingStream.push(data); + awsChunkedEncodingStream.push("\r\n"); + }); + readableStream.on("end", async () => { + awsChunkedEncodingStream.push(`0\r\n`); + if (checksumRequired) { + const checksum = base64Encoder(await digest); + awsChunkedEncodingStream.push(`${checksumLocationName}:${checksum}\r\n`); + awsChunkedEncodingStream.push(`\r\n`); + } + awsChunkedEncodingStream.push(null); + }); + return awsChunkedEncodingStream; +}; +exports.getAwsChunkedEncodingStream = getAwsChunkedEncodingStream; + + +/***/ }), + +/***/ 6607: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(2094), exports); +tslib_1.__exportStar(__nccwpck_require__(3636), exports); +tslib_1.__exportStar(__nccwpck_require__(4515), exports); + + +/***/ }), + +/***/ 4515: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.sdkStreamMixin = void 0; +const node_http_handler_1 = __nccwpck_require__(258); +const util_buffer_from_1 = __nccwpck_require__(1381); +const stream_1 = __nccwpck_require__(2781); +const util_1 = __nccwpck_require__(3837); +const ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED = "The stream has already been transformed."; +const sdkStreamMixin = (stream) => { + var _a, _b; + if (!(stream instanceof stream_1.Readable)) { + const name = ((_b = (_a = stream === null || stream === void 0 ? void 0 : stream.__proto__) === null || _a === void 0 ? void 0 : _a.constructor) === null || _b === void 0 ? void 0 : _b.name) || stream; + throw new Error(`Unexpected stream implementation, expect Stream.Readable instance, got ${name}`); + } + let transformed = false; + const transformToByteArray = async () => { + if (transformed) { + throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED); + } + transformed = true; + return await (0, node_http_handler_1.streamCollector)(stream); + }; + return Object.assign(stream, { + transformToByteArray, + transformToString: async (encoding) => { + const buf = await transformToByteArray(); + if (encoding === undefined || Buffer.isEncoding(encoding)) { + return (0, util_buffer_from_1.fromArrayBuffer)(buf.buffer, buf.byteOffset, buf.byteLength).toString(encoding); + } + else { + const decoder = new util_1.TextDecoder(encoding); + return decoder.decode(buf); + } + }, + transformToWebStream: () => { + if (transformed) { + throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED); + } + if (stream.readableFlowing !== null) { + throw new Error("The stream has been consumed by other callbacks."); + } + if (typeof stream_1.Readable.toWeb !== "function") { + throw new Error("Readable.toWeb() is not supported. 
Please make sure you are using Node.js >= 17.0.0, or polyfill is available."); + } + transformed = true; + return stream_1.Readable.toWeb(stream); + }, + }); +}; +exports.sdkStreamMixin = sdkStreamMixin; + + +/***/ }), + +/***/ 6174: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.escapeUriPath = void 0; +const escape_uri_1 = __nccwpck_require__(10); +const escapeUriPath = (uri) => uri.split("/").map(escape_uri_1.escapeUri).join("/"); +exports.escapeUriPath = escapeUriPath; + + +/***/ }), + +/***/ 10: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.escapeUri = void 0; +const escapeUri = (uri) => encodeURIComponent(uri).replace(/[!'()*]/g, hexEncode); +exports.escapeUri = escapeUri; +const hexEncode = (c) => `%${c.charCodeAt(0).toString(16).toUpperCase()}`; + + +/***/ }), + +/***/ 4197: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(10), exports); +tslib_1.__exportStar(__nccwpck_require__(6174), exports); + + +/***/ }), + +/***/ 5917: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.fromUtf8 = void 0; +const util_buffer_from_1 = __nccwpck_require__(1381); +const fromUtf8 = (input) => { + const buf = (0, util_buffer_from_1.fromString)(input, "utf8"); + return new Uint8Array(buf.buffer, buf.byteOffset, buf.byteLength / Uint8Array.BYTES_PER_ELEMENT); +}; +exports.fromUtf8 = fromUtf8; + + +/***/ }), + +/***/ 1895: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(5917), exports); +tslib_1.__exportStar(__nccwpck_require__(5470), exports); +tslib_1.__exportStar(__nccwpck_require__(9960), exports); + + +/***/ }), + +/***/ 5470: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.toUint8Array = void 0; +const fromUtf8_1 = __nccwpck_require__(5917); +const toUint8Array = (data) => { + if (typeof data === "string") { + return (0, fromUtf8_1.fromUtf8)(data); + } + if (ArrayBuffer.isView(data)) { + return new Uint8Array(data.buffer, data.byteOffset, data.byteLength / Uint8Array.BYTES_PER_ELEMENT); + } + return new Uint8Array(data); +}; +exports.toUint8Array = toUint8Array; + + +/***/ }), + +/***/ 9960: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.toUtf8 = void 0; +const util_buffer_from_1 = __nccwpck_require__(1381); +const toUtf8 = (input) => (0, util_buffer_from_1.fromArrayBuffer)(input.buffer, input.byteOffset, input.byteLength).toString("utf8"); +exports.toUtf8 = toUtf8; + + +/***/ }), + +/***/ 2603: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const validator = __nccwpck_require__(1739); +const XMLParser = __nccwpck_require__(2380); +const XMLBuilder = __nccwpck_require__(660); + +module.exports = { + XMLParser: XMLParser, + 
XMLValidator: validator, + XMLBuilder: XMLBuilder +} + +/***/ }), + +/***/ 8280: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +const nameStartChar = ':A-Za-z_\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD'; +const nameChar = nameStartChar + '\\-.\\d\\u00B7\\u0300-\\u036F\\u203F-\\u2040'; +const nameRegexp = '[' + nameStartChar + '][' + nameChar + ']*' +const regexName = new RegExp('^' + nameRegexp + '$'); + +const getAllMatches = function(string, regex) { + const matches = []; + let match = regex.exec(string); + while (match) { + const allmatches = []; + allmatches.startIndex = regex.lastIndex - match[0].length; + const len = match.length; + for (let index = 0; index < len; index++) { + allmatches.push(match[index]); + } + matches.push(allmatches); + match = regex.exec(string); + } + return matches; +}; + +const isName = function(string) { + const match = regexName.exec(string); + return !(match === null || typeof match === 'undefined'); +}; + +exports.isExist = function(v) { + return typeof v !== 'undefined'; +}; + +exports.isEmptyObject = function(obj) { + return Object.keys(obj).length === 0; +}; + +/** + * Copy all the properties of a into b. + * @param {*} target + * @param {*} a + */ +exports.merge = function(target, a, arrayMode) { + if (a) { + const keys = Object.keys(a); // will return an array of own properties + const len = keys.length; //don't make it inline + for (let i = 0; i < len; i++) { + if (arrayMode === 'strict') { + target[keys[i]] = [ a[keys[i]] ]; + } else { + target[keys[i]] = a[keys[i]]; + } + } + } +}; +/* exports.merge =function (b,a){ + return Object.assign(b,a); +} */ + +exports.getValue = function(v) { + if (exports.isExist(v)) { + return v; + } else { + return ''; + } +}; + +// const fakeCall = function(a) {return a;}; +// const fakeCallNoReturn = function() {}; + +exports.isName = isName; +exports.getAllMatches = getAllMatches; +exports.nameRegexp = nameRegexp; + + +/***/ }), + +/***/ 1739: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +const util = __nccwpck_require__(8280); + +const defaultOptions = { + allowBooleanAttributes: false, //A tag can have attributes without any value + unpairedTags: [] +}; + +//const tagsPattern = new RegExp("<\\/?([\\w:\\-_\.]+)\\s*\/?>","g"); +exports.validate = function (xmlData, options) { + options = Object.assign({}, defaultOptions, options); + + //xmlData = xmlData.replace(/(\r\n|\n|\r)/gm,"");//make it single line + //xmlData = xmlData.replace(/(^\s*<\?xml.*?\?>)/g,"");//Remove XML starting tag + //xmlData = xmlData.replace(/()/g,"");//Remove DOCTYPE + const tags = []; + let tagFound = false; + + //indicates that the root tag has been closed (aka. 
depth 0 has been reached) + let reachedRoot = false; + + if (xmlData[0] === '\ufeff') { + // check for byte order mark (BOM) + xmlData = xmlData.substr(1); + } + + for (let i = 0; i < xmlData.length; i++) { + + if (xmlData[i] === '<' && xmlData[i+1] === '?') { + i+=2; + i = readPI(xmlData,i); + if (i.err) return i; + }else if (xmlData[i] === '<') { + //starting of tag + //read until you reach to '>' avoiding any '>' in attribute value + let tagStartPos = i; + i++; + + if (xmlData[i] === '!') { + i = readCommentAndCDATA(xmlData, i); + continue; + } else { + let closingTag = false; + if (xmlData[i] === '/') { + //closing tag + closingTag = true; + i++; + } + //read tagname + let tagName = ''; + for (; i < xmlData.length && + xmlData[i] !== '>' && + xmlData[i] !== ' ' && + xmlData[i] !== '\t' && + xmlData[i] !== '\n' && + xmlData[i] !== '\r'; i++ + ) { + tagName += xmlData[i]; + } + tagName = tagName.trim(); + //console.log(tagName); + + if (tagName[tagName.length - 1] === '/') { + //self closing tag without attributes + tagName = tagName.substring(0, tagName.length - 1); + //continue; + i--; + } + if (!validateTagName(tagName)) { + let msg; + if (tagName.trim().length === 0) { + msg = "Invalid space after '<'."; + } else { + msg = "Tag '"+tagName+"' is an invalid name."; + } + return getErrorObject('InvalidTag', msg, getLineNumberForPosition(xmlData, i)); + } + + const result = readAttributeStr(xmlData, i); + if (result === false) { + return getErrorObject('InvalidAttr', "Attributes for '"+tagName+"' have open quote.", getLineNumberForPosition(xmlData, i)); + } + let attrStr = result.value; + i = result.index; + + if (attrStr[attrStr.length - 1] === '/') { + //self closing tag + const attrStrStart = i - attrStr.length; + attrStr = attrStr.substring(0, attrStr.length - 1); + const isValid = validateAttributeString(attrStr, options); + if (isValid === true) { + tagFound = true; + //continue; //text may presents after self closing tag + } else { + //the result from the nested function returns the position of the error within the attribute + //in order to get the 'true' error line, we need to calculate the position where the attribute begins (i - attrStr.length) and then add the position within the attribute + //this gives us the absolute index in the entire xml, which we can use to find the line at last + return getErrorObject(isValid.err.code, isValid.err.msg, getLineNumberForPosition(xmlData, attrStrStart + isValid.err.line)); + } + } else if (closingTag) { + if (!result.tagClosed) { + return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' doesn't have proper closing.", getLineNumberForPosition(xmlData, i)); + } else if (attrStr.trim().length > 0) { + return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' can't have attributes or invalid starting.", getLineNumberForPosition(xmlData, tagStartPos)); + } else { + const otg = tags.pop(); + if (tagName !== otg.tagName) { + let openPos = getLineNumberForPosition(xmlData, otg.tagStartPos); + return getErrorObject('InvalidTag', + "Expected closing tag '"+otg.tagName+"' (opened in line "+openPos.line+", col "+openPos.col+") instead of closing tag '"+tagName+"'.", + getLineNumberForPosition(xmlData, tagStartPos)); + } + + //when there are no more tags, we reached the root level. 
+ if (tags.length == 0) { + reachedRoot = true; + } + } + } else { + const isValid = validateAttributeString(attrStr, options); + if (isValid !== true) { + //the result from the nested function returns the position of the error within the attribute + //in order to get the 'true' error line, we need to calculate the position where the attribute begins (i - attrStr.length) and then add the position within the attribute + //this gives us the absolute index in the entire xml, which we can use to find the line at last + return getErrorObject(isValid.err.code, isValid.err.msg, getLineNumberForPosition(xmlData, i - attrStr.length + isValid.err.line)); + } + + //if the root level has been reached before ... + if (reachedRoot === true) { + return getErrorObject('InvalidXml', 'Multiple possible root nodes found.', getLineNumberForPosition(xmlData, i)); + } else if(options.unpairedTags.indexOf(tagName) !== -1){ + //don't push into stack + } else { + tags.push({tagName, tagStartPos}); + } + tagFound = true; + } + + //skip tag text value + //It may include comments and CDATA value + for (i++; i < xmlData.length; i++) { + if (xmlData[i] === '<') { + if (xmlData[i + 1] === '!') { + //comment or CADATA + i++; + i = readCommentAndCDATA(xmlData, i); + continue; + } else if (xmlData[i+1] === '?') { + i = readPI(xmlData, ++i); + if (i.err) return i; + } else{ + break; + } + } else if (xmlData[i] === '&') { + const afterAmp = validateAmpersand(xmlData, i); + if (afterAmp == -1) + return getErrorObject('InvalidChar', "char '&' is not expected.", getLineNumberForPosition(xmlData, i)); + i = afterAmp; + }else{ + if (reachedRoot === true && !isWhiteSpace(xmlData[i])) { + return getErrorObject('InvalidXml', "Extra text at the end", getLineNumberForPosition(xmlData, i)); + } + } + } //end of reading tag text value + if (xmlData[i] === '<') { + i--; + } + } + } else { + if ( isWhiteSpace(xmlData[i])) { + continue; + } + return getErrorObject('InvalidChar', "char '"+xmlData[i]+"' is not expected.", getLineNumberForPosition(xmlData, i)); + } + } + + if (!tagFound) { + return getErrorObject('InvalidXml', 'Start tag expected.', 1); + }else if (tags.length == 1) { + return getErrorObject('InvalidTag', "Unclosed tag '"+tags[0].tagName+"'.", getLineNumberForPosition(xmlData, tags[0].tagStartPos)); + }else if (tags.length > 0) { + return getErrorObject('InvalidXml', "Invalid '"+ + JSON.stringify(tags.map(t => t.tagName), null, 4).replace(/\r?\n/g, '')+ + "' found.", {line: 1, col: 1}); + } + + return true; +}; + +function isWhiteSpace(char){ + return char === ' ' || char === '\t' || char === '\n' || char === '\r'; +} +/** + * Read Processing insstructions and skip + * @param {*} xmlData + * @param {*} i + */ +function readPI(xmlData, i) { + const start = i; + for (; i < xmlData.length; i++) { + if (xmlData[i] == '?' || xmlData[i] == ' ') { + //tagname + const tagname = xmlData.substr(start, i - start); + if (i > 5 && tagname === 'xml') { + return getErrorObject('InvalidXml', 'XML declaration allowed only at the start of the document.', getLineNumberForPosition(xmlData, i)); + } else if (xmlData[i] == '?' 
&& xmlData[i + 1] == '>') { + //check if valid attribut string + i++; + break; + } else { + continue; + } + } + } + return i; +} + +function readCommentAndCDATA(xmlData, i) { + if (xmlData.length > i + 5 && xmlData[i + 1] === '-' && xmlData[i + 2] === '-') { + //comment + for (i += 3; i < xmlData.length; i++) { + if (xmlData[i] === '-' && xmlData[i + 1] === '-' && xmlData[i + 2] === '>') { + i += 2; + break; + } + } + } else if ( + xmlData.length > i + 8 && + xmlData[i + 1] === 'D' && + xmlData[i + 2] === 'O' && + xmlData[i + 3] === 'C' && + xmlData[i + 4] === 'T' && + xmlData[i + 5] === 'Y' && + xmlData[i + 6] === 'P' && + xmlData[i + 7] === 'E' + ) { + let angleBracketsCount = 1; + for (i += 8; i < xmlData.length; i++) { + if (xmlData[i] === '<') { + angleBracketsCount++; + } else if (xmlData[i] === '>') { + angleBracketsCount--; + if (angleBracketsCount === 0) { + break; + } + } + } + } else if ( + xmlData.length > i + 9 && + xmlData[i + 1] === '[' && + xmlData[i + 2] === 'C' && + xmlData[i + 3] === 'D' && + xmlData[i + 4] === 'A' && + xmlData[i + 5] === 'T' && + xmlData[i + 6] === 'A' && + xmlData[i + 7] === '[' + ) { + for (i += 8; i < xmlData.length; i++) { + if (xmlData[i] === ']' && xmlData[i + 1] === ']' && xmlData[i + 2] === '>') { + i += 2; + break; + } + } + } + + return i; +} + +const doubleQuote = '"'; +const singleQuote = "'"; + +/** + * Keep reading xmlData until '<' is found outside the attribute value. + * @param {string} xmlData + * @param {number} i + */ +function readAttributeStr(xmlData, i) { + let attrStr = ''; + let startChar = ''; + let tagClosed = false; + for (; i < xmlData.length; i++) { + if (xmlData[i] === doubleQuote || xmlData[i] === singleQuote) { + if (startChar === '') { + startChar = xmlData[i]; + } else if (startChar !== xmlData[i]) { + //if vaue is enclosed with double quote then single quotes are allowed inside the value and vice versa + } else { + startChar = ''; + } + } else if (xmlData[i] === '>') { + if (startChar === '') { + tagClosed = true; + break; + } + } + attrStr += xmlData[i]; + } + if (startChar !== '') { + return false; + } + + return { + value: attrStr, + index: i, + tagClosed: tagClosed + }; +} + +/** + * Select all the attributes whether valid or invalid. 
+ */ +const validAttrStrRegxp = new RegExp('(\\s*)([^\\s=]+)(\\s*=)?(\\s*([\'"])(([\\s\\S])*?)\\5)?', 'g'); + +//attr, ="sd", a="amit's", a="sd"b="saf", ab cd="" + +function validateAttributeString(attrStr, options) { + //console.log("start:"+attrStr+":end"); + + //if(attrStr.trim().length === 0) return true; //empty string + + const matches = util.getAllMatches(attrStr, validAttrStrRegxp); + const attrNames = {}; + + for (let i = 0; i < matches.length; i++) { + if (matches[i][1].length === 0) { + //nospace before attribute name: a="sd"b="saf" + return getErrorObject('InvalidAttr', "Attribute '"+matches[i][2]+"' has no space in starting.", getPositionFromMatch(matches[i])) + } else if (matches[i][3] !== undefined && matches[i][4] === undefined) { + return getErrorObject('InvalidAttr', "Attribute '"+matches[i][2]+"' is without value.", getPositionFromMatch(matches[i])); + } else if (matches[i][3] === undefined && !options.allowBooleanAttributes) { + //independent attribute: ab + return getErrorObject('InvalidAttr', "boolean attribute '"+matches[i][2]+"' is not allowed.", getPositionFromMatch(matches[i])); + } + /* else if(matches[i][6] === undefined){//attribute without value: ab= + return { err: { code:"InvalidAttr",msg:"attribute " + matches[i][2] + " has no value assigned."}}; + } */ + const attrName = matches[i][2]; + if (!validateAttrName(attrName)) { + return getErrorObject('InvalidAttr', "Attribute '"+attrName+"' is an invalid name.", getPositionFromMatch(matches[i])); + } + if (!attrNames.hasOwnProperty(attrName)) { + //check for duplicate attribute. + attrNames[attrName] = 1; + } else { + return getErrorObject('InvalidAttr', "Attribute '"+attrName+"' is repeated.", getPositionFromMatch(matches[i])); + } + } + + return true; +} + +function validateNumberAmpersand(xmlData, i) { + let re = /\d/; + if (xmlData[i] === 'x') { + i++; + re = /[\da-fA-F]/; + } + for (; i < xmlData.length; i++) { + if (xmlData[i] === ';') + return i; + if (!xmlData[i].match(re)) + break; + } + return -1; +} + +function validateAmpersand(xmlData, i) { + // https://www.w3.org/TR/xml/#dt-charref + i++; + if (xmlData[i] === ';') + return -1; + if (xmlData[i] === '#') { + i++; + return validateNumberAmpersand(xmlData, i); + } + let count = 0; + for (; i < xmlData.length; i++, count++) { + if (xmlData[i].match(/\w/) && count < 20) + continue; + if (xmlData[i] === ';') + break; + return -1; + } + return i; +} + +function getErrorObject(code, message, lineNumber) { + return { + err: { + code: code, + msg: message, + line: lineNumber.line || lineNumber, + col: lineNumber.col, + }, + }; +} + +function validateAttrName(attrName) { + return util.isName(attrName); +} + +// const startsWithXML = /^xml/i; + +function validateTagName(tagname) { + return util.isName(tagname) /* && !tagname.match(startsWithXML) */; +} + +//this function returns the line number for the character at the given index +function getLineNumberForPosition(xmlData, index) { + const lines = xmlData.substring(0, index).split(/\r?\n/); + return { + line: lines.length, + + // column number is last line's length + 1, because column numbering starts at 1: + col: lines[lines.length - 1].length + 1 + }; +} + +//this function returns the position of the first character of match within attrStr +function getPositionFromMatch(match) { + return match.startIndex + match[1].length; +} + + +/***/ }), + +/***/ 660: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +//parse Empty Node as self closing node +const buildFromOrderedJs 
= __nccwpck_require__(2462);
+
+const defaultOptions = {
+ attributeNamePrefix: '@_',
+ attributesGroupName: false,
+ textNodeName: '#text',
+ ignoreAttributes: true,
+ cdataPropName: false,
+ format: false,
+ indentBy: ' ',
+ suppressEmptyNode: false,
+ suppressUnpairedNode: true,
+ suppressBooleanAttributes: true,
+ tagValueProcessor: function(key, a) {
+ return a;
+ },
+ attributeValueProcessor: function(attrName, a) {
+ return a;
+ },
+ preserveOrder: false,
+ commentPropName: false,
+ unpairedTags: [],
+ entities: [
+ { regex: new RegExp("&", "g"), val: "&amp;" },//it must be on top
+ { regex: new RegExp(">", "g"), val: "&gt;" },
+ { regex: new RegExp("<", "g"), val: "&lt;" },
+ { regex: new RegExp("\'", "g"), val: "&apos;" },
+ { regex: new RegExp("\"", "g"), val: "&quot;" }
+ ],
+ processEntities: true,
+ stopNodes: [],
+ // transformTagName: false,
+ // transformAttributeName: false,
+ oneListGroup: false
+};
+
+function Builder(options) {
+ this.options = Object.assign({}, defaultOptions, options);
+ if (this.options.ignoreAttributes || this.options.attributesGroupName) {
+ this.isAttribute = function(/*a*/) {
+ return false;
+ };
+ } else {
+ this.attrPrefixLen = this.options.attributeNamePrefix.length;
+ this.isAttribute = isAttribute;
+ }
+
+ this.processTextOrObjNode = processTextOrObjNode
+
+ if (this.options.format) {
+ this.indentate = indentate;
+ this.tagEndChar = '>\n';
+ this.newLine = '\n';
+ } else {
+ this.indentate = function() {
+ return '';
+ };
+ this.tagEndChar = '>';
+ this.newLine = '';
+ }
+}
+
+Builder.prototype.build = function(jObj) {
+ if(this.options.preserveOrder){
+ return buildFromOrderedJs(jObj, this.options);
+ }else {
+ if(Array.isArray(jObj) && this.options.arrayNodeName && this.options.arrayNodeName.length > 1){
+ jObj = {
+ [this.options.arrayNodeName] : jObj
+ }
+ }
+ return this.j2x(jObj, 0).val;
+ }
+};
+
+Builder.prototype.j2x = function(jObj, level) {
+ let attrStr = '';
+ let val = '';
+ for (let key in jObj) {
+ if (typeof jObj[key] === 'undefined') {
+ // supress undefined node
+ } else if (jObj[key] === null) {
+ if(key[0] === "?") val += this.indentate(level) + '<' + key + '?' + this.tagEndChar;
+ else val += this.indentate(level) + '<' + key + '/' + this.tagEndChar;
+ // val += this.indentate(level) + '<' + key + '/' + this.tagEndChar;
+ } else if (jObj[key] instanceof Date) {
+ val += this.buildTextValNode(jObj[key], key, '', level);
+ } else if (typeof jObj[key] !== 'object') {
+ //premitive type
+ const attr = this.isAttribute(key);
+ if (attr) {
+ attrStr += this.buildAttrPairStr(attr, '' + jObj[key]);
+ }else {
+ //tag value
+ if (key === this.options.textNodeName) {
+ let newval = this.options.tagValueProcessor(key, '' + jObj[key]);
+ val += this.replaceEntitiesValue(newval);
+ } else {
+ val += this.buildTextValNode(jObj[key], key, '', level);
+ }
+ }
+ } else if (Array.isArray(jObj[key])) {
+ //repeated nodes
+ const arrLen = jObj[key].length;
+ let listTagVal = "";
+ for (let j = 0; j < arrLen; j++) {
+ const item = jObj[key][j];
+ if (typeof item === 'undefined') {
+ // supress undefined node
+ } else if (item === null) {
+ if(key[0] === "?") val += this.indentate(level) + '<' + key + '?' + this.tagEndChar;
+ else val += this.indentate(level) + '<' + key + '/' + this.tagEndChar;
+ // val += this.indentate(level) + '<' + key + '/' + this.tagEndChar;
+ } else if (typeof item === 'object') {
+ if(this.options.oneListGroup ){
+ listTagVal += this.j2x(item, level + 1).val;
+ }else{
+ listTagVal += this.processTextOrObjNode(item, key, level)
+ }
+ } else {
+ listTagVal += this.buildTextValNode(item, key, '', level);
+ }
+ }
+ if(this.options.oneListGroup){
+ listTagVal = this.buildObjectNode(listTagVal, key, '', level);
+ }
+ val += listTagVal;
+ } else {
+ //nested node
+ if (this.options.attributesGroupName && key === this.options.attributesGroupName) {
+ const Ks = Object.keys(jObj[key]);
+ const L = Ks.length;
+ for (let j = 0; j < L; j++) {
+ attrStr += this.buildAttrPairStr(Ks[j], '' + jObj[key][Ks[j]]);
+ }
+ } else {
+ val += this.processTextOrObjNode(jObj[key], key, level)
+ }
+ }
+ }
+ return {attrStr: attrStr, val: val};
+};
+
+Builder.prototype.buildAttrPairStr = function(attrName, val){
+ val = this.options.attributeValueProcessor(attrName, '' + val);
+ val = this.replaceEntitiesValue(val);
+ if (this.options.suppressBooleanAttributes && val === "true") {
+ return ' ' + attrName;
+ } else return ' ' + attrName + '="' + val + '"';
+}
+
+function processTextOrObjNode (object, key, level) {
+ const result = this.j2x(object, level + 1);
+ if (object[this.options.textNodeName] !== undefined && Object.keys(object).length === 1) {
+ return this.buildTextValNode(object[this.options.textNodeName], key, result.attrStr, level);
+ } else {
+ return this.buildObjectNode(result.val, key, result.attrStr, level);
+ }
+}
+
+Builder.prototype.buildObjectNode = function(val, key, attrStr, level) {
+ if(val === ""){
+ if(key[0] === "?") return this.indentate(level) + '<' + key + attrStr+ '?' + this.tagEndChar;
+ else {
+ return this.indentate(level) + '<' + key + attrStr + this.closeTag(key) + this.tagEndChar;
+ }
+ }else{
+
+ let tagEndExp = '</' + key + this.tagEndChar;
+ let piClosingChar = "";
+
+ if(key[0] === "?") {
+ piClosingChar = "?";
+ tagEndExp = "";
+ }
+
+ // attrStr is an empty string in case the attribute came as undefined or null
+ if ((attrStr || attrStr === '') && val.indexOf('<') === -1) {
+ return ( this.indentate(level) + '<' + key + attrStr + piClosingChar + '>' + val + tagEndExp );
+ } else if (this.options.commentPropName !== false && key === this.options.commentPropName && piClosingChar.length === 0) {
+ return this.indentate(level) + `<!--${val}-->` + this.newLine;
+ }else {
+ return (
+ this.indentate(level) + '<' + key + attrStr + piClosingChar + this.tagEndChar +
+ val +
+ this.indentate(level) + tagEndExp );
+ }
+ }
+}
+
+Builder.prototype.closeTag = function(key){
+ let closeTag = "";
+ if(this.options.unpairedTags.indexOf(key) !== -1){ //unpaired
+ if(!this.options.suppressUnpairedNode) closeTag = "/"
+ }else if(this.options.suppressEmptyNode){ //empty
+ closeTag = "/";
+ }else{
+ closeTag = `></${key}`;
+ }
+ return closeTag;
+}
+
+Builder.prototype.buildTextValNode = function(val, key, attrStr, level) {
+ if (this.options.cdataPropName !== false && key === this.options.cdataPropName) {
+ return this.indentate(level) + `<![CDATA[${val}]]>` + this.newLine;
+ }else if (this.options.commentPropName !== false && key === this.options.commentPropName) {
+ return this.indentate(level) + `<!--${val}-->` + this.newLine;
+ }else if(key[0] === "?") {//PI tag
+ return this.indentate(level) + '<' + key + attrStr+ '?' + this.tagEndChar;
+ }else{
+ let textValue = this.options.tagValueProcessor(key, val);
+ textValue = this.replaceEntitiesValue(textValue);
+
+ if( textValue === ''){
+ return this.indentate(level) + '<' + key + attrStr + this.closeTag(key) + this.tagEndChar;
+ }else{
+ return this.indentate(level) + '<' + key + attrStr + '>' +
+ textValue +
+ '</' + key + this.tagEndChar;
+ }
+ }
+}
+
+Builder.prototype.replaceEntitiesValue = function(textValue){
+ if(textValue && textValue.length > 0 && this.options.processEntities){
+ for (let i=0; i<this.options.entities.length; i++) {
+ const entity = this.options.entities[i];
+ textValue = textValue.replace(entity.regex, entity.val);
+ }
+ }
+ return textValue;
+}
+
+function indentate(level) {
+ return this.options.indentBy.repeat(level);
+}
+
+function isAttribute(name /*, options*/) {
+ if (name.startsWith(this.options.attributeNamePrefix) && name !== this.options.textNodeName) {
+ return name.substr(this.attrPrefixLen);
+ } else {
+ return false;
+ }
+}
+
+module.exports = Builder;
+
+
+/***/ }),
+
+/***/ 2462:
+/***/ ((module) => {
+
+const EOL = "\n";
+
+/**
+ *
+ * @param {array} jArray
+ * @param {any} options
+ * @returns
+ */
+function toXml(jArray, options) {
+ let indentation = "";
+ if (options.format && options.indentBy.length > 0) {
+ indentation = EOL;
+ }
+ return arrToStr(jArray, options, "", indentation);
+}
+
+function arrToStr(arr, options, jPath, indentation) {
+ let xmlStr = "";
+ let isPreviousElementTag = false;
+
+ for (let i = 0; i < arr.length; i++) {
+ const tagObj = arr[i];
+ const tagName = propName(tagObj);
+ let newJPath = "";
+ if (jPath.length === 0) newJPath = tagName
+ else newJPath = `${jPath}.${tagName}`;
+
+ if (tagName === options.textNodeName) {
+ let tagText = tagObj[tagName];
+ if (!isStopNode(newJPath, options)) {
+ tagText = options.tagValueProcessor(tagName, tagText);
+ tagText = replaceEntitiesValue(tagText, options);
+ }
+ if (isPreviousElementTag) {
+ xmlStr += indentation;
+ }
+ xmlStr += tagText;
+ isPreviousElementTag = false;
+ continue;
+ } else if (tagName === options.cdataPropName) {
+ if (isPreviousElementTag) {
+ xmlStr += indentation;
+ }
+ xmlStr += `<![CDATA[${tagObj[tagName][0][options.textNodeName]}]]>`;
+ isPreviousElementTag = false;
+ continue;
+ } else if (tagName === options.commentPropName) {
+ xmlStr += indentation + `<!--${tagObj[tagName][0][options.textNodeName]}-->`;
+ isPreviousElementTag = true;
+ continue;
+ } else if (tagName[0] === "?") {
+ const attStr = attr_to_str(tagObj[":@"], options);
+ const tempInd = tagName === "?xml" ? "" : indentation;
+ let piTextNodeName = tagObj[tagName][0][options.textNodeName];
+ piTextNodeName = piTextNodeName.length !== 0 ? " " + piTextNodeName : ""; //remove extra spacing
+ xmlStr += tempInd + `<${tagName}${piTextNodeName}${attStr}?>`;
+ isPreviousElementTag = true;
+ continue;
+ }
+ let newIdentation = indentation;
+ if (newIdentation !== "") {
+ newIdentation += options.indentBy;
+ }
+ const attStr = attr_to_str(tagObj[":@"], options);
+ const tagStart = indentation + `<${tagName}${attStr}`;
+ const tagValue = arrToStr(tagObj[tagName], options, newJPath, newIdentation);
+ if (options.unpairedTags.indexOf(tagName) !== -1) {
+ if (options.suppressUnpairedNode) xmlStr += tagStart + ">";
+ else xmlStr += tagStart + "/>";
+ } else if ((!tagValue || tagValue.length === 0) && options.suppressEmptyNode) {
+ xmlStr += tagStart + "/>";
+ } else if (tagValue && tagValue.endsWith(">")) {
+ xmlStr += tagStart + `>${tagValue}${indentation}</${tagName}>`;
+ } else {
+ xmlStr += tagStart + ">";
+ if (tagValue && indentation !== "" && (tagValue.includes("/>") || tagValue.includes("</"))) {
+ xmlStr += indentation + options.indentBy + tagValue + indentation;
+ } else {
+ xmlStr += tagValue;
+ }
+ xmlStr += `</${tagName}>`;
+ }
+ isPreviousElementTag = true;
+ }
+
+ return xmlStr;
+}
+
+function propName(obj) {
+ const keys = Object.keys(obj);
+ for (let i = 0; i < keys.length; i++) {
+ const key = keys[i];
+ if (key !== ":@") return key;
+ }
+}
+
+function attr_to_str(attrMap, options) {
+ let attrStr = "";
+ if (attrMap && !options.ignoreAttributes) {
+ for (let attr in attrMap) {
+ let attrVal = options.attributeValueProcessor(attr, attrMap[attr]);
+ attrVal = replaceEntitiesValue(attrVal, options);
+ if (attrVal === true && options.suppressBooleanAttributes) {
+ attrStr += ` ${attr.substr(options.attributeNamePrefix.length)}`;
+ } else {
+ attrStr += ` ${attr.substr(options.attributeNamePrefix.length)}="${attrVal}"`;
+ }
+ }
+ }
+ return attrStr;
+}
+
+function isStopNode(jPath, options) {
+ jPath = jPath.substr(0, jPath.length - options.textNodeName.length - 1);
+ let tagName = jPath.substr(jPath.lastIndexOf(".") + 1);
+ for (let index in options.stopNodes) {
+ if (options.stopNodes[index] === jPath || options.stopNodes[index] === "*." + tagName) return true;
+ }
+ return false;
+}
+
+function replaceEntitiesValue(textValue, options) {
+ if (textValue && textValue.length > 0 && options.processEntities) {
+ for (let i = 0; i < options.entities.length; i++) {
+ const entity = options.entities[i];
+ textValue = textValue.replace(entity.regex, entity.val);
+ }
+ }
+ return textValue;
+}
+module.exports = toXml;
+
+
+/***/ }),
+
+/***/ 6072:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+const util = __nccwpck_require__(8280);
+
+//TODO: handle comments
+function readDocType(xmlData, i){
+
+ const entities = {};
+ if( xmlData[i + 3] === 'O' &&
+ xmlData[i + 4] === 'C' &&
+ xmlData[i + 5] === 'T' &&
+ xmlData[i + 6] === 'Y' &&
+ xmlData[i + 7] === 'P' &&
+ xmlData[i + 8] === 'E')
+ {
+ i = i+9;
+ let angleBracketsCount = 1;
+ let hasBody = false, comment = false;
+ let exp = "";
+ for(;i<xmlData.length;i++){
+ if (xmlData[i] === '<' && !comment) { //Determine the tag type
+ if( hasBody && isEntity(xmlData, i)){
+ i += 7;
+ [entityName, val,i] = readEntityExp(xmlData,i+1);
+ if(val.indexOf("&") === -1) //Parameter entities are not supported
+ entities[ validateEntityName(entityName) ] = {
+ regx : RegExp( `&${entityName};`,"g"),
+ val: val
+ };
+ }
+ else if( hasBody && isElement(xmlData, i)) i += 8;//Not supported
+ else if( hasBody && isAttlist(xmlData, i)) i += 8;//Not supported
+ else if( hasBody && isNotation(xmlData, i)) i += 9;//Not supported
+ else if( isComment) comment = true;
+ else throw new Error("Invalid DOCTYPE");
+
+ angleBracketsCount++;
+ exp = "";
+ } else if (xmlData[i] === '>') { //Read tag content
+ if(comment){
+ if( xmlData[i - 1] === "-" && xmlData[i - 2] === "-"){
+ comment = false;
+ angleBracketsCount--;
+ }
+ }else{
+ angleBracketsCount--;
+ }
+ if (angleBracketsCount === 0) {
+ break;
+ }
+ }else if( xmlData[i] === '['){
+ hasBody = true;
+ }else{
+ exp += xmlData[i];
+ }
+ }
+ if(angleBracketsCount !== 0){
+ throw new Error(`Unclosed DOCTYPE`);
+ }
+ }else{
+ throw new Error(`Invalid Tag instead of DOCTYPE`);
+ }
+ return {entities, i};
+}
+
+function readEntityExp(xmlData,i){
+ //External entities are not supported
+ //
+
+ //Parameter entities are not supported
+ //
+
+ //Internal entities are supported
+ //
+
+ //read EntityName
+ let entityName = "";
+ for (; i < xmlData.length && (xmlData[i] !== "'" && xmlData[i] !== '"' ); i++) {
+ // if(xmlData[i] === " ") continue;
+ // else
+ entityName += xmlData[i];
+ }
+ entityName = entityName.trim();
+ if(entityName.indexOf(" ") !== -1) throw new Error("External entites are not supported");
+
+ //read Entity Value
+ const startChar = xmlData[i++];
+ let val = ""
+ for (; i < xmlData.length && xmlData[i] !== startChar ; i++) {
+ val += xmlData[i];
+ }
+ return [entityName, val, i];
+}
+
+function isComment(xmlData, i){
+ if(xmlData[i+1] === '!' &&
+ xmlData[i+2] === '-' &&
+ xmlData[i+3] === '-') return true
+ return false
+}
+function isEntity(xmlData, i){
+ if(xmlData[i+1] === '!' &&
+ xmlData[i+2] === 'E' &&
+ xmlData[i+3] === 'N' &&
+ xmlData[i+4] === 'T' &&
+ xmlData[i+5] === 'I' &&
+ xmlData[i+6] === 'T' &&
+ xmlData[i+7] === 'Y') return true
+ return false
+}
+function isElement(xmlData, i){
+ if(xmlData[i+1] === '!' &&
+ xmlData[i+2] === 'E' &&
+ xmlData[i+3] === 'L' &&
+ xmlData[i+4] === 'E' &&
+ xmlData[i+5] === 'M' &&
+ xmlData[i+6] === 'E' &&
+ xmlData[i+7] === 'N' &&
+ xmlData[i+8] === 'T') return true
+ return false
+}
+
+function isAttlist(xmlData, i){
+ if(xmlData[i+1] === '!' &&
+ xmlData[i+2] === 'A' &&
+ xmlData[i+3] === 'T' &&
+ xmlData[i+4] === 'T' &&
+ xmlData[i+5] === 'L' &&
+ xmlData[i+6] === 'I' &&
+ xmlData[i+7] === 'S' &&
+ xmlData[i+8] === 'T') return true
+ return false
+}
+function isNotation(xmlData, i){
+ if(xmlData[i+1] === '!'
&& + xmlData[i+2] === 'N' && + xmlData[i+3] === 'O' && + xmlData[i+4] === 'T' && + xmlData[i+5] === 'A' && + xmlData[i+6] === 'T' && + xmlData[i+7] === 'I' && + xmlData[i+8] === 'O' && + xmlData[i+9] === 'N') return true + return false +} + +function validateEntityName(name){ + if (util.isName(name)) + return name; + else + throw new Error(`Invalid entity name ${name}`); +} + +module.exports = readDocType; + + +/***/ }), + +/***/ 6993: +/***/ ((__unused_webpack_module, exports) => { + + +const defaultOptions = { + preserveOrder: false, + attributeNamePrefix: '@_', + attributesGroupName: false, + textNodeName: '#text', + ignoreAttributes: true, + removeNSPrefix: false, // remove NS from tag name or attribute name if true + allowBooleanAttributes: false, //a tag can have attributes without any value + //ignoreRootElement : false, + parseTagValue: true, + parseAttributeValue: false, + trimValues: true, //Trim string values of tag and attributes + cdataPropName: false, + numberParseOptions: { + hex: true, + leadingZeros: true, + eNotation: true + }, + tagValueProcessor: function(tagName, val) { + return val; + }, + attributeValueProcessor: function(attrName, val) { + return val; + }, + stopNodes: [], //nested tags will not be parsed even for errors + alwaysCreateTextNode: false, + isArray: () => false, + commentPropName: false, + unpairedTags: [], + processEntities: true, + htmlEntities: false, + ignoreDeclaration: false, + ignorePiTags: false, + transformTagName: false, + transformAttributeName: false, + updateTag: function(tagName, jPath, attrs){ + return tagName + }, + // skipEmptyListItem: false +}; + +const buildOptions = function(options) { + return Object.assign({}, defaultOptions, options); +}; + +exports.buildOptions = buildOptions; +exports.defaultOptions = defaultOptions; + +/***/ }), + +/***/ 5832: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +///@ts-check + +const util = __nccwpck_require__(8280); +const xmlNode = __nccwpck_require__(7462); +const readDocType = __nccwpck_require__(6072); +const toNumber = __nccwpck_require__(4526); + +const regx = + '<((!\\[CDATA\\[([\\s\\S]*?)(]]>))|((NAME:)?(NAME))([^>]*)>|((\\/)(NAME)\\s*>))([^<]*)' + .replace(/NAME/g, util.nameRegexp); + +//const tagsRegx = new RegExp("<(\\/?[\\w:\\-\._]+)([^>]*)>(\\s*"+cdataRegx+")*([^<]+)?","g"); +//const tagsRegx = new RegExp("<(\\/?)((\\w*:)?([\\w:\\-\._]+))([^>]*)>([^<]*)("+cdataRegx+"([^<]*))*([^<]+)?","g"); + +class OrderedObjParser{ + constructor(options){ + this.options = options; + this.currentNode = null; + this.tagsNodeStack = []; + this.docTypeEntities = {}; + this.lastEntities = { + "apos" : { regex: /&(apos|#39|#x27);/g, val : "'"}, + "gt" : { regex: /&(gt|#62|#x3E);/g, val : ">"}, + "lt" : { regex: /&(lt|#60|#x3C);/g, val : "<"}, + "quot" : { regex: /&(quot|#34|#x22);/g, val : "\""}, + }; + this.ampEntity = { regex: /&(amp|#38|#x26);/g, val : "&"}; + this.htmlEntities = { + "space": { regex: /&(nbsp|#160);/g, val: " " }, + // "lt" : { regex: /&(lt|#60);/g, val: "<" }, + // "gt" : { regex: /&(gt|#62);/g, val: ">" }, + // "amp" : { regex: /&(amp|#38);/g, val: "&" }, + // "quot" : { regex: /&(quot|#34);/g, val: "\"" }, + // "apos" : { regex: /&(apos|#39);/g, val: "'" }, + "cent" : { regex: /&(cent|#162);/g, val: "¢" }, + "pound" : { regex: /&(pound|#163);/g, val: "£" }, + "yen" : { regex: /&(yen|#165);/g, val: "¥" }, + "euro" : { regex: /&(euro|#8364);/g, val: "€" }, + "copyright" : { regex: /&(copy|#169);/g, val: "©" }, + "reg" : { regex: 
/&(reg|#174);/g, val: "®" }, + "inr" : { regex: /&(inr|#8377);/g, val: "₹" }, + }; + this.addExternalEntities = addExternalEntities; + this.parseXml = parseXml; + this.parseTextData = parseTextData; + this.resolveNameSpace = resolveNameSpace; + this.buildAttributesMap = buildAttributesMap; + this.isItStopNode = isItStopNode; + this.replaceEntitiesValue = replaceEntitiesValue; + this.readStopNodeData = readStopNodeData; + this.saveTextToParentTag = saveTextToParentTag; + this.addChild = addChild; + } + +} + +function addExternalEntities(externalEntities){ + const entKeys = Object.keys(externalEntities); + for (let i = 0; i < entKeys.length; i++) { + const ent = entKeys[i]; + this.lastEntities[ent] = { + regex: new RegExp("&"+ent+";","g"), + val : externalEntities[ent] + } + } +} + +/** + * @param {string} val + * @param {string} tagName + * @param {string} jPath + * @param {boolean} dontTrim + * @param {boolean} hasAttributes + * @param {boolean} isLeafNode + * @param {boolean} escapeEntities + */ +function parseTextData(val, tagName, jPath, dontTrim, hasAttributes, isLeafNode, escapeEntities) { + if (val !== undefined) { + if (this.options.trimValues && !dontTrim) { + val = val.trim(); + } + if(val.length > 0){ + if(!escapeEntities) val = this.replaceEntitiesValue(val); + + const newval = this.options.tagValueProcessor(tagName, val, jPath, hasAttributes, isLeafNode); + if(newval === null || newval === undefined){ + //don't parse + return val; + }else if(typeof newval !== typeof val || newval !== val){ + //overwrite + return newval; + }else if(this.options.trimValues){ + return parseValue(val, this.options.parseTagValue, this.options.numberParseOptions); + }else{ + const trimmedVal = val.trim(); + if(trimmedVal === val){ + return parseValue(val, this.options.parseTagValue, this.options.numberParseOptions); + }else{ + return val; + } + } + } + } +} + +function resolveNameSpace(tagname) { + if (this.options.removeNSPrefix) { + const tags = tagname.split(':'); + const prefix = tagname.charAt(0) === '/' ? 
'/' : ''; + if (tags[0] === 'xmlns') { + return ''; + } + if (tags.length === 2) { + tagname = prefix + tags[1]; + } + } + return tagname; +} + +//TODO: change regex to capture NS +//const attrsRegx = new RegExp("([\\w\\-\\.\\:]+)\\s*=\\s*(['\"])((.|\n)*?)\\2","gm"); +const attrsRegx = new RegExp('([^\\s=]+)\\s*(=\\s*([\'"])([\\s\\S]*?)\\3)?', 'gm'); + +function buildAttributesMap(attrStr, jPath, tagName) { + if (!this.options.ignoreAttributes && typeof attrStr === 'string') { + // attrStr = attrStr.replace(/\r?\n/g, ' '); + //attrStr = attrStr || attrStr.trim(); + + const matches = util.getAllMatches(attrStr, attrsRegx); + const len = matches.length; //don't make it inline + const attrs = {}; + for (let i = 0; i < len; i++) { + const attrName = this.resolveNameSpace(matches[i][1]); + let oldVal = matches[i][4]; + let aName = this.options.attributeNamePrefix + attrName; + if (attrName.length) { + if (this.options.transformAttributeName) { + aName = this.options.transformAttributeName(aName); + } + if(aName === "__proto__") aName = "#__proto__"; + if (oldVal !== undefined) { + if (this.options.trimValues) { + oldVal = oldVal.trim(); + } + oldVal = this.replaceEntitiesValue(oldVal); + const newVal = this.options.attributeValueProcessor(attrName, oldVal, jPath); + if(newVal === null || newVal === undefined){ + //don't parse + attrs[aName] = oldVal; + }else if(typeof newVal !== typeof oldVal || newVal !== oldVal){ + //overwrite + attrs[aName] = newVal; + }else{ + //parse + attrs[aName] = parseValue( + oldVal, + this.options.parseAttributeValue, + this.options.numberParseOptions + ); + } + } else if (this.options.allowBooleanAttributes) { + attrs[aName] = true; + } + } + } + if (!Object.keys(attrs).length) { + return; + } + if (this.options.attributesGroupName) { + const attrCollection = {}; + attrCollection[this.options.attributesGroupName] = attrs; + return attrCollection; + } + return attrs + } +} + +const parseXml = function(xmlData) { + xmlData = xmlData.replace(/\r\n?/g, "\n"); //TODO: remove this line + const xmlObj = new xmlNode('!xml'); + let currentNode = xmlObj; + let textData = ""; + let jPath = ""; + for(let i=0; i< xmlData.length; i++){//for each char in XML data + const ch = xmlData[i]; + if(ch === '<'){ + // const nextIndex = i+1; + // const _2ndChar = xmlData[nextIndex]; + if( xmlData[i+1] === '/') {//Closing Tag + const closeIndex = findClosingIndex(xmlData, ">", i, "Closing Tag is not closed.") + let tagName = xmlData.substring(i+2,closeIndex).trim(); + + if(this.options.removeNSPrefix){ + const colonIndex = tagName.indexOf(":"); + if(colonIndex !== -1){ + tagName = tagName.substr(colonIndex+1); + } + } + + if(this.options.transformTagName) { + tagName = this.options.transformTagName(tagName); + } + + if(currentNode){ + textData = this.saveTextToParentTag(textData, currentNode, jPath); + } + + //check if last tag of nested tag was unpaired tag + const lastTagName = jPath.substring(jPath.lastIndexOf(".")+1); + if(tagName && this.options.unpairedTags.indexOf(tagName) !== -1 ){ + throw new Error(`Unpaired tag can not be used as closing tag: `); + } + let propIndex = 0 + if(lastTagName && this.options.unpairedTags.indexOf(lastTagName) !== -1 ){ + propIndex = jPath.lastIndexOf('.', jPath.lastIndexOf('.')-1) + this.tagsNodeStack.pop(); + }else{ + propIndex = jPath.lastIndexOf("."); + } + jPath = jPath.substring(0, propIndex); + + currentNode = this.tagsNodeStack.pop();//avoid recursion, set the parent tag scope + textData = ""; + i = closeIndex; + } else if( xmlData[i+1] === '?') { 
+ + let tagData = readTagExp(xmlData,i, false, "?>"); + if(!tagData) throw new Error("Pi Tag is not closed."); + + textData = this.saveTextToParentTag(textData, currentNode, jPath); + if( (this.options.ignoreDeclaration && tagData.tagName === "?xml") || this.options.ignorePiTags){ + + }else{ + + const childNode = new xmlNode(tagData.tagName); + childNode.add(this.options.textNodeName, ""); + + if(tagData.tagName !== tagData.tagExp && tagData.attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagData.tagExp, jPath, tagData.tagName); + } + this.addChild(currentNode, childNode, jPath) + + } + + + i = tagData.closeIndex + 1; + } else if(xmlData.substr(i + 1, 3) === '!--') { + const endIndex = findClosingIndex(xmlData, "-->", i+4, "Comment is not closed.") + if(this.options.commentPropName){ + const comment = xmlData.substring(i + 4, endIndex - 2); + + textData = this.saveTextToParentTag(textData, currentNode, jPath); + + currentNode.add(this.options.commentPropName, [ { [this.options.textNodeName] : comment } ]); + } + i = endIndex; + } else if( xmlData.substr(i + 1, 2) === '!D') { + const result = readDocType(xmlData, i); + this.docTypeEntities = result.entities; + i = result.i; + }else if(xmlData.substr(i + 1, 2) === '![') { + const closeIndex = findClosingIndex(xmlData, "]]>", i, "CDATA is not closed.") - 2; + const tagExp = xmlData.substring(i + 9,closeIndex); + + textData = this.saveTextToParentTag(textData, currentNode, jPath); + + //cdata should be set even if it is 0 length string + if(this.options.cdataPropName){ + // let val = this.parseTextData(tagExp, this.options.cdataPropName, jPath + "." + this.options.cdataPropName, true, false, true); + // if(!val) val = ""; + currentNode.add(this.options.cdataPropName, [ { [this.options.textNodeName] : tagExp } ]); + }else{ + let val = this.parseTextData(tagExp, currentNode.tagname, jPath, true, false, true); + if(val == undefined) val = ""; + currentNode.add(this.options.textNodeName, val); + } + + i = closeIndex + 2; + }else {//Opening tag + let result = readTagExp(xmlData,i, this.options.removeNSPrefix); + let tagName= result.tagName; + let tagExp = result.tagExp; + let attrExpPresent = result.attrExpPresent; + let closeIndex = result.closeIndex; + + if (this.options.transformTagName) { + tagName = this.options.transformTagName(tagName); + } + + //save text as child node + if (currentNode && textData) { + if(currentNode.tagname !== '!xml'){ + //when nested tag is found + textData = this.saveTextToParentTag(textData, currentNode, jPath, false); + } + } + + //check if last tag was unpaired tag + const lastTag = currentNode; + if(lastTag && this.options.unpairedTags.indexOf(lastTag.tagname) !== -1 ){ + currentNode = this.tagsNodeStack.pop(); + jPath = jPath.substring(0, jPath.lastIndexOf(".")); + } + if(tagName !== xmlObj.tagname){ + jPath += jPath ? "." 
+ tagName : tagName; + } + if (this.isItStopNode(this.options.stopNodes, jPath, tagName)) { //TODO: namespace + let tagContent = ""; + //self-closing tag + if(tagExp.length > 0 && tagExp.lastIndexOf("/") === tagExp.length - 1){ + i = result.closeIndex; + } + //unpaired tag + else if(this.options.unpairedTags.indexOf(tagName) !== -1){ + i = result.closeIndex; + } + //normal tag + else{ + //read until closing tag is found + const result = this.readStopNodeData(xmlData, tagName, closeIndex + 1); + if(!result) throw new Error(`Unexpected end of ${tagName}`); + i = result.i; + tagContent = result.tagContent; + } + + const childNode = new xmlNode(tagName); + if(tagName !== tagExp && attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); + } + if(tagContent) { + tagContent = this.parseTextData(tagContent, tagName, jPath, true, attrExpPresent, true, true); + } + + jPath = jPath.substr(0, jPath.lastIndexOf(".")); + childNode.add(this.options.textNodeName, tagContent); + + this.addChild(currentNode, childNode, jPath) + }else{ + //selfClosing tag + if(tagExp.length > 0 && tagExp.lastIndexOf("/") === tagExp.length - 1){ + if(tagName[tagName.length - 1] === "/"){ //remove trailing '/' + tagName = tagName.substr(0, tagName.length - 1); + tagExp = tagName; + }else{ + tagExp = tagExp.substr(0, tagExp.length - 1); + } + + if(this.options.transformTagName) { + tagName = this.options.transformTagName(tagName); + } + + const childNode = new xmlNode(tagName); + if(tagName !== tagExp && attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); + } + this.addChild(currentNode, childNode, jPath) + jPath = jPath.substr(0, jPath.lastIndexOf(".")); + } + //opening tag + else{ + const childNode = new xmlNode( tagName); + this.tagsNodeStack.push(currentNode); + + if(tagName !== tagExp && attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); + } + this.addChild(currentNode, childNode, jPath) + currentNode = childNode; + } + textData = ""; + i = closeIndex; + } + } + }else{ + textData += xmlData[i]; + } + } + return xmlObj.child; +} + +function addChild(currentNode, childNode, jPath){ + const result = this.options.updateTag(childNode.tagname, jPath, childNode[":@"]) + if(result === false){ + }else if(typeof result === "string"){ + childNode.tagname = result + currentNode.addChild(childNode); + }else{ + currentNode.addChild(childNode); + } +} + +const replaceEntitiesValue = function(val){ + + if(this.options.processEntities){ + for(let entityName in this.docTypeEntities){ + const entity = this.docTypeEntities[entityName]; + val = val.replace( entity.regx, entity.val); + } + for(let entityName in this.lastEntities){ + const entity = this.lastEntities[entityName]; + val = val.replace( entity.regex, entity.val); + } + if(this.options.htmlEntities){ + for(let entityName in this.htmlEntities){ + const entity = this.htmlEntities[entityName]; + val = val.replace( entity.regex, entity.val); + } + } + val = val.replace( this.ampEntity.regex, this.ampEntity.val); + } + return val; +} +function saveTextToParentTag(textData, currentNode, jPath, isLeafNode) { + if (textData) { //store previously collected data as textNode + if(isLeafNode === undefined) isLeafNode = Object.keys(currentNode.child).length === 0 + + textData = this.parseTextData(textData, + currentNode.tagname, + jPath, + false, + currentNode[":@"] ? 
Object.keys(currentNode[":@"]).length !== 0 : false, + isLeafNode); + + if (textData !== undefined && textData !== "") + currentNode.add(this.options.textNodeName, textData); + textData = ""; + } + return textData; +} + +//TODO: use jPath to simplify the logic +/** + * + * @param {string[]} stopNodes + * @param {string} jPath + * @param {string} currentTagName + */ +function isItStopNode(stopNodes, jPath, currentTagName){ + const allNodesExp = "*." + currentTagName; + for (const stopNodePath in stopNodes) { + const stopNodeExp = stopNodes[stopNodePath]; + if( allNodesExp === stopNodeExp || jPath === stopNodeExp ) return true; + } + return false; +} + +/** + * Returns the tag Expression and where it is ending handling single-double quotes situation + * @param {string} xmlData + * @param {number} i starting index + * @returns + */ +function tagExpWithClosingIndex(xmlData, i, closingChar = ">"){ + let attrBoundary; + let tagExp = ""; + for (let index = i; index < xmlData.length; index++) { + let ch = xmlData[index]; + if (attrBoundary) { + if (ch === attrBoundary) attrBoundary = "";//reset + } else if (ch === '"' || ch === "'") { + attrBoundary = ch; + } else if (ch === closingChar[0]) { + if(closingChar[1]){ + if(xmlData[index + 1] === closingChar[1]){ + return { + data: tagExp, + index: index + } + } + }else{ + return { + data: tagExp, + index: index + } + } + } else if (ch === '\t') { + ch = " " + } + tagExp += ch; + } +} + +function findClosingIndex(xmlData, str, i, errMsg){ + const closingIndex = xmlData.indexOf(str, i); + if(closingIndex === -1){ + throw new Error(errMsg) + }else{ + return closingIndex + str.length - 1; + } +} + +function readTagExp(xmlData,i, removeNSPrefix, closingChar = ">"){ + const result = tagExpWithClosingIndex(xmlData, i+1, closingChar); + if(!result) return; + let tagExp = result.data; + const closeIndex = result.index; + const separatorIndex = tagExp.search(/\s/); + let tagName = tagExp; + let attrExpPresent = true; + if(separatorIndex !== -1){//separate tag name and attributes expression + tagName = tagExp.substr(0, separatorIndex).replace(/\s\s*$/, ''); + tagExp = tagExp.substr(separatorIndex + 1); + } + + if(removeNSPrefix){ + const colonIndex = tagName.indexOf(":"); + if(colonIndex !== -1){ + tagName = tagName.substr(colonIndex+1); + attrExpPresent = tagName !== result.data.substr(colonIndex + 1); + } + } + + return { + tagName: tagName, + tagExp: tagExp, + closeIndex: closeIndex, + attrExpPresent: attrExpPresent, + } +} +/** + * find paired tag for a stop node + * @param {string} xmlData + * @param {string} tagName + * @param {number} i + */ +function readStopNodeData(xmlData, tagName, i){ + const startIndex = i; + // Starting at 1 since we already have an open tag + let openTagCount = 1; + + for (; i < xmlData.length; i++) { + if( xmlData[i] === "<"){ + if (xmlData[i+1] === "/") {//close tag + const closeIndex = findClosingIndex(xmlData, ">", i, `${tagName} is not closed`); + let closeTagName = xmlData.substring(i+2,closeIndex).trim(); + if(closeTagName === tagName){ + openTagCount--; + if (openTagCount === 0) { + return { + tagContent: xmlData.substring(startIndex, i), + i : closeIndex + } + } + } + i=closeIndex; + } else if(xmlData[i+1] === '?') { + const closeIndex = findClosingIndex(xmlData, "?>", i+1, "StopNode is not closed.") + i=closeIndex; + } else if(xmlData.substr(i + 1, 3) === '!--') { + const closeIndex = findClosingIndex(xmlData, "-->", i+3, "StopNode is not closed.") + i=closeIndex; + } else if(xmlData.substr(i + 1, 2) === '![') { + const 
closeIndex = findClosingIndex(xmlData, "]]>", i, "StopNode is not closed.") - 2; + i=closeIndex; + } else { + const tagData = readTagExp(xmlData, i, '>') + + if (tagData) { + const openTagName = tagData && tagData.tagName; + if (openTagName === tagName && tagData.tagExp[tagData.tagExp.length-1] !== "/") { + openTagCount++; + } + i=tagData.closeIndex; + } + } + } + }//end for loop +} + +function parseValue(val, shouldParse, options) { + if (shouldParse && typeof val === 'string') { + //console.log(options) + const newval = val.trim(); + if(newval === 'true' ) return true; + else if(newval === 'false' ) return false; + else return toNumber(val, options); + } else { + if (util.isExist(val)) { + return val; + } else { + return ''; + } + } +} + + +module.exports = OrderedObjParser; + + +/***/ }), + +/***/ 2380: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const { buildOptions} = __nccwpck_require__(6993); +const OrderedObjParser = __nccwpck_require__(5832); +const { prettify} = __nccwpck_require__(2882); +const validator = __nccwpck_require__(1739); + +class XMLParser{ + + constructor(options){ + this.externalEntities = {}; + this.options = buildOptions(options); + + } + /** + * Parse XML dats to JS object + * @param {string|Buffer} xmlData + * @param {boolean|Object} validationOption + */ + parse(xmlData,validationOption){ + if(typeof xmlData === "string"){ + }else if( xmlData.toString){ + xmlData = xmlData.toString(); + }else{ + throw new Error("XML data is accepted in String or Bytes[] form.") + } + if( validationOption){ + if(validationOption === true) validationOption = {}; //validate with default options + + const result = validator.validate(xmlData, validationOption); + if (result !== true) { + throw Error( `${result.err.msg}:${result.err.line}:${result.err.col}` ) + } + } + const orderedObjParser = new OrderedObjParser(this.options); + orderedObjParser.addExternalEntities(this.externalEntities); + const orderedResult = orderedObjParser.parseXml(xmlData); + if(this.options.preserveOrder || orderedResult === undefined) return orderedResult; + else return prettify(orderedResult, this.options); + } + + /** + * Add Entity which is not by default supported by this library + * @param {string} key + * @param {string} value + */ + addEntity(key, value){ + if(value.indexOf("&") !== -1){ + throw new Error("Entity value can't have '&'") + }else if(key.indexOf("&") !== -1 || key.indexOf(";") !== -1){ + throw new Error("An entity must be set without '&' and ';'. Eg. use '#xD' for ' '") + }else if(value === "&"){ + throw new Error("An entity with value '&' is not permitted"); + }else{ + this.externalEntities[key] = value; + } + } +} + +module.exports = XMLParser; + +/***/ }), + +/***/ 2882: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +/** + * + * @param {array} node + * @param {any} options + * @returns + */ +function prettify(node, options){ + return compress( node, options); +} + +/** + * + * @param {array} arr + * @param {object} options + * @param {string} jPath + * @returns object + */ +function compress(arr, options, jPath){ + let text; + const compressedObj = {}; + for (let i = 0; i < arr.length; i++) { + const tagObj = arr[i]; + const property = propName(tagObj); + let newJpath = ""; + if(jPath === undefined) newJpath = property; + else newJpath = jPath + "." 
+ property; + + if(property === options.textNodeName){ + if(text === undefined) text = tagObj[property]; + else text += "" + tagObj[property]; + }else if(property === undefined){ + continue; + }else if(tagObj[property]){ + + let val = compress(tagObj[property], options, newJpath); + const isLeaf = isLeafTag(val, options); + + if(tagObj[":@"]){ + assignAttributes( val, tagObj[":@"], newJpath, options); + }else if(Object.keys(val).length === 1 && val[options.textNodeName] !== undefined && !options.alwaysCreateTextNode){ + val = val[options.textNodeName]; + }else if(Object.keys(val).length === 0){ + if(options.alwaysCreateTextNode) val[options.textNodeName] = ""; + else val = ""; + } + + if(compressedObj[property] !== undefined && compressedObj.hasOwnProperty(property)) { + if(!Array.isArray(compressedObj[property])) { + compressedObj[property] = [ compressedObj[property] ]; + } + compressedObj[property].push(val); + }else{ + //TODO: if a node is not an array, then check if it should be an array + //also determine if it is a leaf node + if (options.isArray(property, newJpath, isLeaf )) { + compressedObj[property] = [val]; + }else{ + compressedObj[property] = val; + } + } + } + + } + // if(text && text.length > 0) compressedObj[options.textNodeName] = text; + if(typeof text === "string"){ + if(text.length > 0) compressedObj[options.textNodeName] = text; + }else if(text !== undefined) compressedObj[options.textNodeName] = text; + return compressedObj; +} + +function propName(obj){ + const keys = Object.keys(obj); + for (let i = 0; i < keys.length; i++) { + const key = keys[i]; + if(key !== ":@") return key; + } +} + +function assignAttributes(obj, attrMap, jpath, options){ + if (attrMap) { + const keys = Object.keys(attrMap); + const len = keys.length; //don't make it inline + for (let i = 0; i < len; i++) { + const atrrName = keys[i]; + if (options.isArray(atrrName, jpath + "." 
+ atrrName, true, true)) { + obj[atrrName] = [ attrMap[atrrName] ]; + } else { + obj[atrrName] = attrMap[atrrName]; + } + } + } +} + +function isLeafTag(obj, options){ + const { textNodeName } = options; + const propCount = Object.keys(obj).length; + + if (propCount === 0) { + return true; + } + + if ( + propCount === 1 && + (obj[textNodeName] || typeof obj[textNodeName] === "boolean" || obj[textNodeName] === 0) + ) { + return true; + } + + return false; +} +exports.prettify = prettify; + + +/***/ }), + +/***/ 7462: +/***/ ((module) => { + +"use strict"; + + +class XmlNode{ + constructor(tagname) { + this.tagname = tagname; + this.child = []; //nested tags, text, cdata, comments in order + this[":@"] = {}; //attributes map + } + add(key,val){ + // this.child.push( {name : key, val: val, isCdata: isCdata }); + if(key === "__proto__") key = "#__proto__"; + this.child.push( {[key]: val }); + } + addChild(node) { + if(node.tagname === "__proto__") node.tagname = "#__proto__"; + if(node[":@"] && Object.keys(node[":@"]).length > 0){ + this.child.push( { [node.tagname]: node.child, [":@"]: node[":@"] }); + }else{ + this.child.push( { [node.tagname]: node.child }); + } + }; +}; + + +module.exports = XmlNode; + +/***/ }), + +/***/ 4526: +/***/ ((module) => { + +const hexRegex = /^[-+]?0x[a-fA-F0-9]+$/; +const numRegex = /^([\-\+])?(0*)(\.[0-9]+([eE]\-?[0-9]+)?|[0-9]+(\.[0-9]+([eE]\-?[0-9]+)?)?)$/; +// const octRegex = /0x[a-z0-9]+/; +// const binRegex = /0x[a-z0-9]+/; + + +//polyfill +if (!Number.parseInt && window.parseInt) { + Number.parseInt = window.parseInt; +} +if (!Number.parseFloat && window.parseFloat) { + Number.parseFloat = window.parseFloat; +} + + +const consider = { + hex : true, + leadingZeros: true, + decimalPoint: "\.", + eNotation: true + //skipLike: /regex/ +}; + +function toNumber(str, options = {}){ + // const options = Object.assign({}, consider); + // if(opt.leadingZeros === false){ + // options.leadingZeros = false; + // }else if(opt.hex === false){ + // options.hex = false; + // } + + options = Object.assign({}, consider, options ); + if(!str || typeof str !== "string" ) return str; + + let trimmedStr = str.trim(); + // if(trimmedStr === "0.0") return 0; + // else if(trimmedStr === "+0.0") return 0; + // else if(trimmedStr === "-0.0") return -0; + + if(options.skipLike !== undefined && options.skipLike.test(trimmedStr)) return str; + else if (options.hex && hexRegex.test(trimmedStr)) { + return Number.parseInt(trimmedStr, 16); + // } else if (options.parseOct && octRegex.test(str)) { + // return Number.parseInt(val, 8); + // }else if (options.parseBin && binRegex.test(str)) { + // return Number.parseInt(val, 2); + }else{ + //separate negative sign, leading zeros, and rest number + const match = numRegex.exec(trimmedStr); + if(match){ + const sign = match[1]; + const leadingZeros = match[2]; + let numTrimmedByZeros = trimZeros(match[3]); //complete num without leading zeros + //trim ending zeros for floating number + + const eNotation = match[4] || match[6]; + if(!options.leadingZeros && leadingZeros.length > 0 && sign && trimmedStr[2] !== ".") return str; //-0123 + else if(!options.leadingZeros && leadingZeros.length > 0 && !sign && trimmedStr[1] !== ".") return str; //0123 + else{//no leading zeros or leading zeros are allowed + const num = Number(trimmedStr); + const numStr = "" + num; + if(numStr.search(/[eE]/) !== -1){ //given number is long and parsed to eNotation + if(options.eNotation) return num; + else return str; + }else if(eNotation){ //given number has enotation 
+ if(options.eNotation) return num; + else return str; + }else if(trimmedStr.indexOf(".") !== -1){ //floating number + // const decimalPart = match[5].substr(1); + // const intPart = trimmedStr.substr(0,trimmedStr.indexOf(".")); + + + // const p = numStr.indexOf("."); + // const givenIntPart = numStr.substr(0,p); + // const givenDecPart = numStr.substr(p+1); + if(numStr === "0" && (numTrimmedByZeros === "") ) return num; //0.0 + else if(numStr === numTrimmedByZeros) return num; //0.456. 0.79000 + else if( sign && numStr === "-"+numTrimmedByZeros) return num; + else return str; + } + + if(leadingZeros){ + // if(numTrimmedByZeros === numStr){ + // if(options.leadingZeros) return num; + // else return str; + // }else return str; + if(numTrimmedByZeros === numStr) return num; + else if(sign+numTrimmedByZeros === numStr) return num; + else return str; + } + + if(trimmedStr === numStr) return num; + else if(trimmedStr === sign+numStr) return num; + // else{ + // //number with +/- sign + // trimmedStr.test(/[-+][0-9]); + + // } + return str; + } + // else if(!eNotation && trimmedStr && trimmedStr !== Number(trimmedStr) ) return str; + + }else{ //non-numeric string + return str; + } + } +} + +/** + * + * @param {string} numStr without leading zeros + * @returns + */ +function trimZeros(numStr){ + if(numStr && numStr.indexOf(".") !== -1){//float + numStr = numStr.replace(/0+$/, ""); //remove ending zeros + if(numStr === ".") numStr = "0"; + else if(numStr[0] === ".") numStr = "0"+numStr; + else if(numStr[numStr.length-1] === ".") numStr = numStr.substr(0,numStr.length-1); + return numStr; + } + return numStr; +} +module.exports = toNumber + + +/***/ }), + +/***/ 4351: +/***/ ((module) => { + +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ +/* global global, define, Symbol, Reflect, Promise, SuppressedError */ +var __extends; +var __assign; +var __rest; +var __decorate; +var __param; +var __esDecorate; +var __runInitializers; +var __propKey; +var __setFunctionName; +var __metadata; +var __awaiter; +var __generator; +var __exportStar; +var __values; +var __read; +var __spread; +var __spreadArrays; +var __spreadArray; +var __await; +var __asyncGenerator; +var __asyncDelegator; +var __asyncValues; +var __makeTemplateObject; +var __importStar; +var __importDefault; +var __classPrivateFieldGet; +var __classPrivateFieldSet; +var __classPrivateFieldIn; +var __createBinding; +var __addDisposableResource; +var __disposeResources; +(function (factory) { + var root = typeof global === "object" ? global : typeof self === "object" ? self : typeof this === "object" ? 
this : {}; + if (typeof define === "function" && define.amd) { + define("tslib", ["exports"], function (exports) { factory(createExporter(root, createExporter(exports))); }); + } + else if ( true && typeof module.exports === "object") { + factory(createExporter(root, createExporter(module.exports))); + } + else { + factory(createExporter(root)); + } + function createExporter(exports, previous) { + if (exports !== root) { + if (typeof Object.create === "function") { + Object.defineProperty(exports, "__esModule", { value: true }); + } + else { + exports.__esModule = true; + } + } + return function (id, v) { return exports[id] = previous ? previous(id, v) : v; }; + } +}) +(function (exporter) { + var extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + + __extends = function (d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); + }; + + __assign = Object.assign || function (t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + }; + + __rest = function (s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; + }; + + __decorate = function (decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; + }; + + __param = function (paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } + }; + + __esDecorate = function (ctor, descriptorIn, decorators, contextIn, initializers, extraInitializers) { + function accept(f) { if (f !== void 0 && typeof f !== "function") throw new TypeError("Function expected"); return f; } + var kind = contextIn.kind, key = kind === "getter" ? "get" : kind === "setter" ? "set" : "value"; + var target = !descriptorIn && ctor ? contextIn["static"] ? ctor : ctor.prototype : null; + var descriptor = descriptorIn || (target ? Object.getOwnPropertyDescriptor(target, contextIn.name) : {}); + var _, done = false; + for (var i = decorators.length - 1; i >= 0; i--) { + var context = {}; + for (var p in contextIn) context[p] = p === "access" ? 
{} : contextIn[p]; + for (var p in contextIn.access) context.access[p] = contextIn.access[p]; + context.addInitializer = function (f) { if (done) throw new TypeError("Cannot add initializers after decoration has completed"); extraInitializers.push(accept(f || null)); }; + var result = (0, decorators[i])(kind === "accessor" ? { get: descriptor.get, set: descriptor.set } : descriptor[key], context); + if (kind === "accessor") { + if (result === void 0) continue; + if (result === null || typeof result !== "object") throw new TypeError("Object expected"); + if (_ = accept(result.get)) descriptor.get = _; + if (_ = accept(result.set)) descriptor.set = _; + if (_ = accept(result.init)) initializers.unshift(_); + } + else if (_ = accept(result)) { + if (kind === "field") initializers.unshift(_); + else descriptor[key] = _; + } + } + if (target) Object.defineProperty(target, contextIn.name, descriptor); + done = true; + }; + + __runInitializers = function (thisArg, initializers, value) { + var useValue = arguments.length > 2; + for (var i = 0; i < initializers.length; i++) { + value = useValue ? initializers[i].call(thisArg, value) : initializers[i].call(thisArg); + } + return useValue ? value : void 0; + }; + + __propKey = function (x) { + return typeof x === "symbol" ? x : "".concat(x); + }; + + __setFunctionName = function (f, name, prefix) { + if (typeof name === "symbol") name = name.description ? "[".concat(name.description, "]") : ""; + return Object.defineProperty(f, "name", { configurable: true, value: prefix ? "".concat(prefix, " ", name) : name }); + }; + + __metadata = function (metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); + }; + + __awaiter = function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + + __generator = function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (g && (g = 0, op[0] && (_ = 0)), _) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? 
y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } + }; + + __exportStar = function(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); + }; + + __createBinding = Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; + }); + + __values = function (o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined."); + }; + + __read = function (o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; + }; + + /** @deprecated */ + __spread = function () { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; + }; + + /** @deprecated */ + __spreadArrays = function () { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; + }; + + __spreadArray = function (to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); + }; + + __await = function (v) { + return this instanceof __await ? 
(this.v = v, this) : new __await(v); + }; + + __asyncGenerator = function (thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } + }; + + __asyncDelegator = function (o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: false } : f ? f(v) : v; } : f; } + }; + + __asyncValues = function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } + }; + + __makeTemplateObject = function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; + }; + + var __setModuleDefault = Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }; + + __importStar = function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; + }; + + __importDefault = function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; + }; + + __classPrivateFieldGet = function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); + }; + + __classPrivateFieldSet = function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? 
receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; + }; + + __classPrivateFieldIn = function (state, receiver) { + if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object"); + return typeof state === "function" ? receiver === state : state.has(receiver); + }; + + __addDisposableResource = function (env, value, async) { + if (value !== null && value !== void 0) { + if (typeof value !== "object" && typeof value !== "function") throw new TypeError("Object expected."); + var dispose; + if (async) { + if (!Symbol.asyncDispose) throw new TypeError("Symbol.asyncDispose is not defined."); + dispose = value[Symbol.asyncDispose]; + } + if (dispose === void 0) { + if (!Symbol.dispose) throw new TypeError("Symbol.dispose is not defined."); + dispose = value[Symbol.dispose]; + } + if (typeof dispose !== "function") throw new TypeError("Object not disposable."); + env.stack.push({ value: value, dispose: dispose, async: async }); + } + else if (async) { + env.stack.push({ async: true }); + } + return value; + }; + + var _SuppressedError = typeof SuppressedError === "function" ? SuppressedError : function (error, suppressed, message) { + var e = new Error(message); + return e.name = "SuppressedError", e.error = error, e.suppressed = suppressed, e; + }; + + __disposeResources = function (env) { + function fail(e) { + env.error = env.hasError ? new _SuppressedError(e, env.error, "An error was suppressed during disposal.") : e; + env.hasError = true; + } + function next() { + while (env.stack.length) { + var rec = env.stack.pop(); + try { + var result = rec.dispose && rec.dispose.call(rec.value); + if (rec.async) return Promise.resolve(result).then(next, function(e) { fail(e); return next(); }); + } + catch (e) { + fail(e); + } + } + if (env.hasError) throw env.error; + } + return next(); + }; + + exporter("__extends", __extends); + exporter("__assign", __assign); + exporter("__rest", __rest); + exporter("__decorate", __decorate); + exporter("__param", __param); + exporter("__esDecorate", __esDecorate); + exporter("__runInitializers", __runInitializers); + exporter("__propKey", __propKey); + exporter("__setFunctionName", __setFunctionName); + exporter("__metadata", __metadata); + exporter("__awaiter", __awaiter); + exporter("__generator", __generator); + exporter("__exportStar", __exportStar); + exporter("__createBinding", __createBinding); + exporter("__values", __values); + exporter("__read", __read); + exporter("__spread", __spread); + exporter("__spreadArrays", __spreadArrays); + exporter("__spreadArray", __spreadArray); + exporter("__await", __await); + exporter("__asyncGenerator", __asyncGenerator); + exporter("__asyncDelegator", __asyncDelegator); + exporter("__asyncValues", __asyncValues); + exporter("__makeTemplateObject", __makeTemplateObject); + exporter("__importStar", __importStar); + exporter("__importDefault", __importDefault); + exporter("__classPrivateFieldGet", __classPrivateFieldGet); + exporter("__classPrivateFieldSet", __classPrivateFieldSet); + exporter("__classPrivateFieldIn", __classPrivateFieldIn); + exporter("__addDisposableResource", __addDisposableResource); + exporter("__disposeResources", __disposeResources); +}); + + +/***/ }), + +/***/ 4294: +/***/ ((module, __unused_webpack_exports, 
__nccwpck_require__) => { + +module.exports = __nccwpck_require__(4219); + + +/***/ }), + +/***/ 4219: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var net = __nccwpck_require__(1808); +var tls = __nccwpck_require__(4404); +var http = __nccwpck_require__(3685); +var https = __nccwpck_require__(5687); +var events = __nccwpck_require__(2361); +var assert = __nccwpck_require__(9491); +var util = __nccwpck_require__(3837); + + +exports.httpOverHttp = httpOverHttp; +exports.httpsOverHttp = httpsOverHttp; +exports.httpOverHttps = httpOverHttps; +exports.httpsOverHttps = httpsOverHttps; + + +function httpOverHttp(options) { + var agent = new TunnelingAgent(options); + agent.request = http.request; + return agent; +} + +function httpsOverHttp(options) { + var agent = new TunnelingAgent(options); + agent.request = http.request; + agent.createSocket = createSecureSocket; + agent.defaultPort = 443; + return agent; +} + +function httpOverHttps(options) { + var agent = new TunnelingAgent(options); + agent.request = https.request; + return agent; +} + +function httpsOverHttps(options) { + var agent = new TunnelingAgent(options); + agent.request = https.request; + agent.createSocket = createSecureSocket; + agent.defaultPort = 443; + return agent; +} + + +function TunnelingAgent(options) { + var self = this; + self.options = options || {}; + self.proxyOptions = self.options.proxy || {}; + self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets; + self.requests = []; + self.sockets = []; + + self.on('free', function onFree(socket, host, port, localAddress) { + var options = toOptions(host, port, localAddress); + for (var i = 0, len = self.requests.length; i < len; ++i) { + var pending = self.requests[i]; + if (pending.host === options.host && pending.port === options.port) { + // Detect the request to connect same origin server, + // reuse the connection. + self.requests.splice(i, 1); + pending.request.onSocket(socket); + return; + } + } + socket.destroy(); + self.removeSocket(socket); + }); +} +util.inherits(TunnelingAgent, events.EventEmitter); + +TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) { + var self = this; + var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress)); + + if (self.sockets.length >= this.maxSockets) { + // We are over limit so we'll add it to the queue. + self.requests.push(options); + return; + } + + // If we are under maxSockets create a new one. 
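+  // createSocket (below) opens a CONNECT tunnel through the proxy; when the request later
+  // emits 'free', the constructor's onFree handler hands the socket to a queued request for
+  // the same host:port, or destroys it if nothing is pending.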
+ self.createSocket(options, function(socket) { + socket.on('free', onFree); + socket.on('close', onCloseOrRemove); + socket.on('agentRemove', onCloseOrRemove); + req.onSocket(socket); + + function onFree() { + self.emit('free', socket, options); + } + + function onCloseOrRemove(err) { + self.removeSocket(socket); + socket.removeListener('free', onFree); + socket.removeListener('close', onCloseOrRemove); + socket.removeListener('agentRemove', onCloseOrRemove); + } + }); +}; + +TunnelingAgent.prototype.createSocket = function createSocket(options, cb) { + var self = this; + var placeholder = {}; + self.sockets.push(placeholder); + + var connectOptions = mergeOptions({}, self.proxyOptions, { + method: 'CONNECT', + path: options.host + ':' + options.port, + agent: false, + headers: { + host: options.host + ':' + options.port + } + }); + if (options.localAddress) { + connectOptions.localAddress = options.localAddress; + } + if (connectOptions.proxyAuth) { + connectOptions.headers = connectOptions.headers || {}; + connectOptions.headers['Proxy-Authorization'] = 'Basic ' + + new Buffer(connectOptions.proxyAuth).toString('base64'); + } + + debug('making CONNECT request'); + var connectReq = self.request(connectOptions); + connectReq.useChunkedEncodingByDefault = false; // for v0.6 + connectReq.once('response', onResponse); // for v0.6 + connectReq.once('upgrade', onUpgrade); // for v0.6 + connectReq.once('connect', onConnect); // for v0.7 or later + connectReq.once('error', onError); + connectReq.end(); + + function onResponse(res) { + // Very hacky. This is necessary to avoid http-parser leaks. + res.upgrade = true; + } + + function onUpgrade(res, socket, head) { + // Hacky. + process.nextTick(function() { + onConnect(res, socket, head); + }); + } + + function onConnect(res, socket, head) { + connectReq.removeAllListeners(); + socket.removeAllListeners(); + + if (res.statusCode !== 200) { + debug('tunneling socket could not be established, statusCode=%d', + res.statusCode); + socket.destroy(); + var error = new Error('tunneling socket could not be established, ' + + 'statusCode=' + res.statusCode); + error.code = 'ECONNRESET'; + options.request.emit('error', error); + self.removeSocket(placeholder); + return; + } + if (head.length > 0) { + debug('got illegal response body from proxy'); + socket.destroy(); + var error = new Error('got illegal response body from proxy'); + error.code = 'ECONNRESET'; + options.request.emit('error', error); + self.removeSocket(placeholder); + return; + } + debug('tunneling connection has established'); + self.sockets[self.sockets.indexOf(placeholder)] = socket; + return cb(socket); + } function onError(cause) { connectReq.removeAllListeners(); @@ -2100,7 +19911,7 @@ exports.debug = debug; // for test /***/ }), -/***/ 840: +/***/ 5840: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; @@ -2164,29 +19975,29 @@ Object.defineProperty(exports, "parse", ({ } })); -var _v = _interopRequireDefault(__nccwpck_require__(628)); +var _v = _interopRequireDefault(__nccwpck_require__(8628)); -var _v2 = _interopRequireDefault(__nccwpck_require__(409)); +var _v2 = _interopRequireDefault(__nccwpck_require__(6409)); -var _v3 = _interopRequireDefault(__nccwpck_require__(122)); +var _v3 = _interopRequireDefault(__nccwpck_require__(5122)); -var _v4 = _interopRequireDefault(__nccwpck_require__(120)); +var _v4 = _interopRequireDefault(__nccwpck_require__(9120)); -var _nil = _interopRequireDefault(__nccwpck_require__(332)); +var _nil = 
_interopRequireDefault(__nccwpck_require__(5332)); -var _version = _interopRequireDefault(__nccwpck_require__(595)); +var _version = _interopRequireDefault(__nccwpck_require__(1595)); -var _validate = _interopRequireDefault(__nccwpck_require__(900)); +var _validate = _interopRequireDefault(__nccwpck_require__(6900)); -var _stringify = _interopRequireDefault(__nccwpck_require__(950)); +var _stringify = _interopRequireDefault(__nccwpck_require__(8950)); -var _parse = _interopRequireDefault(__nccwpck_require__(746)); +var _parse = _interopRequireDefault(__nccwpck_require__(2746)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } /***/ }), -/***/ 569: +/***/ 4569: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; @@ -2197,7 +20008,7 @@ Object.defineProperty(exports, "__esModule", ({ })); exports["default"] = void 0; -var _crypto = _interopRequireDefault(__nccwpck_require__(113)); +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } @@ -2216,7 +20027,7 @@ exports["default"] = _default; /***/ }), -/***/ 332: +/***/ 5332: /***/ ((__unused_webpack_module, exports) => { "use strict"; @@ -2231,7 +20042,7 @@ exports["default"] = _default; /***/ }), -/***/ 746: +/***/ 2746: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; @@ -2242,7 +20053,7 @@ Object.defineProperty(exports, "__esModule", ({ })); exports["default"] = void 0; -var _validate = _interopRequireDefault(__nccwpck_require__(900)); +var _validate = _interopRequireDefault(__nccwpck_require__(6900)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } @@ -2309,7 +20120,7 @@ Object.defineProperty(exports, "__esModule", ({ })); exports["default"] = rng; -var _crypto = _interopRequireDefault(__nccwpck_require__(113)); +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } @@ -2329,7 +20140,7 @@ function rng() { /***/ }), -/***/ 274: +/***/ 5274: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; @@ -2340,7 +20151,7 @@ Object.defineProperty(exports, "__esModule", ({ })); exports["default"] = void 0; -var _crypto = _interopRequireDefault(__nccwpck_require__(113)); +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } @@ -2359,7 +20170,7 @@ exports["default"] = _default; /***/ }), -/***/ 950: +/***/ 8950: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; @@ -2370,7 +20181,7 @@ Object.defineProperty(exports, "__esModule", ({ })); exports["default"] = void 0; -var _validate = _interopRequireDefault(__nccwpck_require__(900)); +var _validate = _interopRequireDefault(__nccwpck_require__(6900)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } @@ -2405,7 +20216,7 @@ exports["default"] = _default; /***/ }), -/***/ 628: +/***/ 8628: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; @@ -2418,7 +20229,7 @@ exports["default"] = void 0; var _rng = _interopRequireDefault(__nccwpck_require__(807)); -var _stringify = _interopRequireDefault(__nccwpck_require__(950)); +var _stringify = _interopRequireDefault(__nccwpck_require__(8950)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } @@ -2519,7 +20330,7 @@ exports["default"] = _default; /***/ }), -/***/ 409: +/***/ 6409: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; @@ -2530,9 +20341,9 @@ Object.defineProperty(exports, "__esModule", ({ })); exports["default"] = void 0; -var _v = _interopRequireDefault(__nccwpck_require__(998)); +var _v = _interopRequireDefault(__nccwpck_require__(5998)); -var _md = _interopRequireDefault(__nccwpck_require__(569)); +var _md = _interopRequireDefault(__nccwpck_require__(4569)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } @@ -2542,7 +20353,7 @@ exports["default"] = _default; /***/ }), -/***/ 998: +/***/ 5998: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; @@ -2554,9 +20365,9 @@ Object.defineProperty(exports, "__esModule", ({ exports["default"] = _default; exports.URL = exports.DNS = void 0; -var _stringify = _interopRequireDefault(__nccwpck_require__(950)); +var _stringify = _interopRequireDefault(__nccwpck_require__(8950)); -var _parse = _interopRequireDefault(__nccwpck_require__(746)); +var _parse = _interopRequireDefault(__nccwpck_require__(2746)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } @@ -2627,7 +20438,7 @@ function _default(name, version, hashfunc) { /***/ }), -/***/ 122: +/***/ 5122: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; @@ -2640,7 +20451,7 @@ exports["default"] = void 0; var _rng = _interopRequireDefault(__nccwpck_require__(807)); -var _stringify = _interopRequireDefault(__nccwpck_require__(950)); +var _stringify = _interopRequireDefault(__nccwpck_require__(8950)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } @@ -2671,7 +20482,7 @@ exports["default"] = _default; /***/ }), -/***/ 120: +/***/ 9120: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; @@ -2682,9 +20493,9 @@ Object.defineProperty(exports, "__esModule", ({ })); exports["default"] = void 0; -var _v = _interopRequireDefault(__nccwpck_require__(998)); +var _v = _interopRequireDefault(__nccwpck_require__(5998)); -var _sha = _interopRequireDefault(__nccwpck_require__(274)); +var _sha = _interopRequireDefault(__nccwpck_require__(5274)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } @@ -2694,7 +20505,7 @@ exports["default"] = _default; /***/ }), -/***/ 900: +/***/ 6900: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; @@ -2718,7 +20529,7 @@ exports["default"] = _default; /***/ }), -/***/ 595: +/***/ 1595: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; @@ -2729,7 +20540,7 @@ Object.defineProperty(exports, "__esModule", ({ })); exports["default"] = void 0; -var _validate = _interopRequireDefault(__nccwpck_require__(900)); +var _validate = _interopRequireDefault(__nccwpck_require__(6900)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } @@ -2746,7 +20557,253 @@ exports["default"] = _default; /***/ }), -/***/ 491: +/***/ 7424: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.cleanup = void 0; +const core = __importStar(__nccwpck_require__(2186)); +const helpers_1 = __nccwpck_require__(3015); +/** + * When the GitHub Actions job is done, clean up any environment variables that + * may have been set by the configure-aws-credentials steps in the job. + * + * Environment variables are not intended to be shared across different jobs in + * the same GitHub Actions workflow: GitHub Actions documentation states that + * each job runs in a fresh instance. However, doing our own cleanup will + * give us additional assurance that these environment variables are not shared + * with any other jobs. + */ +function cleanup() { + try { + // The GitHub Actions toolkit does not have an option to completely unset + // environment variables, so we overwrite the current value with an empty + // string. The AWS CLI and AWS SDKs will behave correctly: they treat an + // empty string value as if the environment variable does not exist. 
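+        // Note: exportVariable updates process.env for this step and appends to $GITHUB_ENV,
+        // so the emptied values are also seen by later steps in the same job.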
+ core.exportVariable('AWS_ACCESS_KEY_ID', ''); + core.exportVariable('AWS_SECRET_ACCESS_KEY', ''); + core.exportVariable('AWS_SESSION_TOKEN', ''); + core.exportVariable('AWS_DEFAULT_REGION', ''); + core.exportVariable('AWS_REGION', ''); + } + catch (error) { + core.setFailed((0, helpers_1.errorMessage)(error)); + } +} +exports.cleanup = cleanup; +/* c8 ignore start */ +if (require.main === require.cache[eval('__filename')]) { + try { + cleanup(); + } + catch (error) { + core.setFailed((0, helpers_1.errorMessage)(error)); + } +} + + +/***/ }), + +/***/ 3015: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isDefined = exports.errorMessage = exports.retryAndBackoff = exports.verifyKeys = exports.reset = exports.withsleep = exports.defaultSleep = exports.sanitizeGitHubVariables = exports.exportAccountId = exports.exportRegion = exports.unsetCredentials = exports.exportCredentials = void 0; +const core = __importStar(__nccwpck_require__(2186)); +const client_sts_1 = __nccwpck_require__(2209); +const MAX_TAG_VALUE_LENGTH = 256; +const SANITIZATION_CHARACTER = '_'; +const SPECIAL_CHARS_REGEX = /[!@#$%^&*()_+\-=[\]{};':"\\|,.<>/?]+/; +// Configure the AWS CLI and AWS SDKs using environment variables and set them as secrets. 
+// Setting the credentials as secrets masks them in Github Actions logs +function exportCredentials(creds, outputCredentials) { + if (creds?.AccessKeyId) { + core.setSecret(creds.AccessKeyId); + core.exportVariable('AWS_ACCESS_KEY_ID', creds.AccessKeyId); + } + if (creds?.SecretAccessKey) { + core.setSecret(creds.SecretAccessKey); + core.exportVariable('AWS_SECRET_ACCESS_KEY', creds.SecretAccessKey); + } + if (creds?.SessionToken) { + core.setSecret(creds.SessionToken); + core.exportVariable('AWS_SESSION_TOKEN', creds.SessionToken); + } + else if (process.env['AWS_SESSION_TOKEN']) { + // clear session token from previous credentials action + core.exportVariable('AWS_SESSION_TOKEN', ''); + } + if (outputCredentials) { + if (creds?.AccessKeyId) { + core.setOutput('aws-access-key-id', creds.AccessKeyId); + } + if (creds?.SecretAccessKey) { + core.setOutput('aws-secret-access-key', creds.SecretAccessKey); + } + if (creds?.SessionToken) { + core.setOutput('aws-session-token', creds.SessionToken); + } + } +} +exports.exportCredentials = exportCredentials; +function unsetCredentials() { + core.exportVariable('AWS_ACCESS_KEY_ID', ''); + core.exportVariable('AWS_SECRET_ACCESS_KEY', ''); + core.exportVariable('AWS_SESSION_TOKEN', ''); + core.exportVariable('AWS_REGION', ''); + core.exportVariable('AWS_DEFAULT_REGION', ''); +} +exports.unsetCredentials = unsetCredentials; +function exportRegion(region) { + core.exportVariable('AWS_DEFAULT_REGION', region); + core.exportVariable('AWS_REGION', region); +} +exports.exportRegion = exportRegion; +// Obtains account ID from STS Client and sets it as output +async function exportAccountId(credentialsClient, maskAccountId) { + const client = credentialsClient.stsClient; + const identity = await client.send(new client_sts_1.GetCallerIdentityCommand({})); + const accountId = identity.Account; + if (!accountId) { + throw new Error('Could not get Account ID from STS. Did you set credentials?'); + } + if (maskAccountId) { + core.setSecret(accountId); + } + core.setOutput('aws-account-id', accountId); + return accountId; +} +exports.exportAccountId = exportAccountId; +// Tags have a more restrictive set of acceptable characters than GitHub environment variables can. +// This replaces anything not conforming to the tag restrictions by inverting the regular expression. +// See the AWS documentation for constraint specifics https://docs.aws.amazon.com/STS/latest/APIReference/API_Tag.html. 
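+// For example, a workflow name of 'deploy (#42)!' would be sanitized to the tag value 'deploy __42__'.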
+function sanitizeGitHubVariables(name) { + const nameWithoutSpecialCharacters = name.replace(/[^\p{L}\p{Z}\p{N}_.:/=+\-@]/gu, SANITIZATION_CHARACTER); + const nameTruncated = nameWithoutSpecialCharacters.slice(0, MAX_TAG_VALUE_LENGTH); + return nameTruncated; +} +exports.sanitizeGitHubVariables = sanitizeGitHubVariables; +async function defaultSleep(ms) { + return new Promise((resolve) => setTimeout(resolve, ms)); +} +exports.defaultSleep = defaultSleep; +let sleep = defaultSleep; +function withsleep(s) { + sleep = s; +} +exports.withsleep = withsleep; +function reset() { + sleep = defaultSleep; +} +exports.reset = reset; +function verifyKeys(creds) { + if (!creds) { + return; + } + if (creds.AccessKeyId) { + if (SPECIAL_CHARS_REGEX.test(creds.AccessKeyId)) { + throw new Error('AccessKeyId contains special characters.'); + } + } + if (creds.SecretAccessKey) { + if (SPECIAL_CHARS_REGEX.test(creds.SecretAccessKey)) { + throw new Error('SecretAccessKey contains special characters.'); + } + } +} +exports.verifyKeys = verifyKeys; +// Retries the promise with exponential backoff if the error isRetryable up to maxRetries time. +async function retryAndBackoff(fn, isRetryable, maxRetries = 12, retries = 0, base = 50) { + try { + return await fn(); + } + catch (err) { + if (!isRetryable) { + throw err; + } + // It's retryable, so sleep and retry. + await sleep(Math.random() * (Math.pow(2, retries) * base)); + retries += 1; + if (retries >= maxRetries) { + throw err; + } + return await retryAndBackoff(fn, isRetryable, maxRetries, retries, base); + } +} +exports.retryAndBackoff = retryAndBackoff; +/* c8 ignore start */ +function errorMessage(error) { + return error instanceof Error ? error.message : String(error); +} +exports.errorMessage = errorMessage; +function isDefined(i) { + return i !== undefined && i !== null; +} +exports.isDefined = isDefined; +/* c8 ignore stop */ + + +/***/ }), + +/***/ 7578: +/***/ ((module) => { + +module.exports = eval("require")("aws-crt"); + + +/***/ }), + +/***/ 9491: /***/ ((module) => { "use strict"; @@ -2754,7 +20811,23 @@ module.exports = require("assert"); /***/ }), -/***/ 113: +/***/ 4300: +/***/ ((module) => { + +"use strict"; +module.exports = require("buffer"); + +/***/ }), + +/***/ 2081: +/***/ ((module) => { + +"use strict"; +module.exports = require("child_process"); + +/***/ }), + +/***/ 6113: /***/ ((module) => { "use strict"; @@ -2762,7 +20835,7 @@ module.exports = require("crypto"); /***/ }), -/***/ 361: +/***/ 2361: /***/ ((module) => { "use strict"; @@ -2770,7 +20843,7 @@ module.exports = require("events"); /***/ }), -/***/ 147: +/***/ 7147: /***/ ((module) => { "use strict"; @@ -2778,7 +20851,7 @@ module.exports = require("fs"); /***/ }), -/***/ 685: +/***/ 3685: /***/ ((module) => { "use strict"; @@ -2786,7 +20859,15 @@ module.exports = require("http"); /***/ }), -/***/ 687: +/***/ 5158: +/***/ ((module) => { + +"use strict"; +module.exports = require("http2"); + +/***/ }), + +/***/ 5687: /***/ ((module) => { "use strict"; @@ -2794,7 +20875,7 @@ module.exports = require("https"); /***/ }), -/***/ 808: +/***/ 1808: /***/ ((module) => { "use strict"; @@ -2802,7 +20883,7 @@ module.exports = require("net"); /***/ }), -/***/ 37: +/***/ 2037: /***/ ((module) => { "use strict"; @@ -2810,7 +20891,7 @@ module.exports = require("os"); /***/ }), -/***/ 17: +/***/ 1017: /***/ ((module) => { "use strict"; @@ -2818,7 +20899,23 @@ module.exports = require("path"); /***/ }), -/***/ 404: +/***/ 7282: +/***/ ((module) => { + +"use strict"; +module.exports = 
require("process"); + +/***/ }), + +/***/ 2781: +/***/ ((module) => { + +"use strict"; +module.exports = require("stream"); + +/***/ }), + +/***/ 4404: /***/ ((module) => { "use strict"; @@ -2826,12 +20923,44 @@ module.exports = require("tls"); /***/ }), -/***/ 837: +/***/ 7310: +/***/ ((module) => { + +"use strict"; +module.exports = require("url"); + +/***/ }), + +/***/ 3837: /***/ ((module) => { "use strict"; module.exports = require("util"); +/***/ }), + +/***/ 1092: +/***/ ((module) => { + +"use strict"; +module.exports = JSON.parse('{"name":"@aws-sdk/client-sso","description":"AWS SDK for JavaScript Sso Client for Node.js, Browser and React Native","version":"3.391.0","scripts":{"build":"concurrently \'yarn:build:cjs\' \'yarn:build:es\' \'yarn:build:types\'","build:cjs":"tsc -p tsconfig.cjs.json","build:docs":"typedoc","build:es":"tsc -p tsconfig.es.json","build:include:deps":"lerna run --scope $npm_package_name --include-dependencies build","build:types":"tsc -p tsconfig.types.json","build:types:downlevel":"downlevel-dts dist-types dist-types/ts3.4","clean":"rimraf ./dist-* && rimraf *.tsbuildinfo","extract:docs":"api-extractor run --local","generate:client":"node ../../scripts/generate-clients/single-service --solo sso"},"main":"./dist-cjs/index.js","types":"./dist-types/index.d.ts","module":"./dist-es/index.js","sideEffects":false,"dependencies":{"@aws-crypto/sha256-browser":"3.0.0","@aws-crypto/sha256-js":"3.0.0","@aws-sdk/middleware-host-header":"3.391.0","@aws-sdk/middleware-logger":"3.391.0","@aws-sdk/middleware-recursion-detection":"3.391.0","@aws-sdk/middleware-user-agent":"3.391.0","@aws-sdk/types":"3.391.0","@aws-sdk/util-endpoints":"3.391.0","@aws-sdk/util-user-agent-browser":"3.391.0","@aws-sdk/util-user-agent-node":"3.391.0","@smithy/config-resolver":"^2.0.3","@smithy/fetch-http-handler":"^2.0.3","@smithy/hash-node":"^2.0.3","@smithy/invalid-dependency":"^2.0.3","@smithy/middleware-content-length":"^2.0.3","@smithy/middleware-endpoint":"^2.0.3","@smithy/middleware-retry":"^2.0.3","@smithy/middleware-serde":"^2.0.3","@smithy/middleware-stack":"^2.0.0","@smithy/node-config-provider":"^2.0.3","@smithy/node-http-handler":"^2.0.3","@smithy/protocol-http":"^2.0.3","@smithy/smithy-client":"^2.0.3","@smithy/types":"^2.2.0","@smithy/url-parser":"^2.0.3","@smithy/util-base64":"^2.0.0","@smithy/util-body-length-browser":"^2.0.0","@smithy/util-body-length-node":"^2.0.0","@smithy/util-defaults-mode-browser":"^2.0.3","@smithy/util-defaults-mode-node":"^2.0.3","@smithy/util-retry":"^2.0.0","@smithy/util-utf8":"^2.0.0","tslib":"^2.5.0"},"devDependencies":{"@smithy/service-client-documentation-generator":"^2.0.0","@tsconfig/node14":"1.0.3","@types/node":"^14.14.31","concurrently":"7.0.0","downlevel-dts":"0.10.1","rimraf":"3.0.2","typedoc":"0.23.23","typescript":"~4.9.5"},"engines":{"node":">=14.0.0"},"typesVersions":{"<4.0":{"dist-types/*":["dist-types/ts3.4/*"]}},"files":["dist-*/**"],"author":{"name":"AWS SDK for JavaScript Team","url":"https://aws.amazon.com/javascript/"},"license":"Apache-2.0","browser":{"./dist-es/runtimeConfig":"./dist-es/runtimeConfig.browser"},"react-native":{"./dist-es/runtimeConfig":"./dist-es/runtimeConfig.native"},"homepage":"https://github.com/aws/aws-sdk-js-v3/tree/main/clients/client-sso","repository":{"type":"git","url":"https://github.com/aws/aws-sdk-js-v3.git","directory":"clients/client-sso"}}'); + +/***/ }), + +/***/ 7947: +/***/ ((module) => { + +"use strict"; +module.exports = JSON.parse('{"name":"@aws-sdk/client-sts","description":"AWS SDK for 
JavaScript Sts Client for Node.js, Browser and React Native","version":"3.391.0","scripts":{"build":"concurrently \'yarn:build:cjs\' \'yarn:build:es\' \'yarn:build:types\'","build:cjs":"tsc -p tsconfig.cjs.json","build:docs":"typedoc","build:es":"tsc -p tsconfig.es.json","build:include:deps":"lerna run --scope $npm_package_name --include-dependencies build","build:types":"tsc -p tsconfig.types.json","build:types:downlevel":"downlevel-dts dist-types dist-types/ts3.4","clean":"rimraf ./dist-* && rimraf *.tsbuildinfo","extract:docs":"api-extractor run --local","generate:client":"node ../../scripts/generate-clients/single-service --solo sts","test":"yarn test:unit","test:unit":"jest"},"main":"./dist-cjs/index.js","types":"./dist-types/index.d.ts","module":"./dist-es/index.js","sideEffects":false,"dependencies":{"@aws-crypto/sha256-browser":"3.0.0","@aws-crypto/sha256-js":"3.0.0","@aws-sdk/credential-provider-node":"3.391.0","@aws-sdk/middleware-host-header":"3.391.0","@aws-sdk/middleware-logger":"3.391.0","@aws-sdk/middleware-recursion-detection":"3.391.0","@aws-sdk/middleware-sdk-sts":"3.391.0","@aws-sdk/middleware-signing":"3.391.0","@aws-sdk/middleware-user-agent":"3.391.0","@aws-sdk/types":"3.391.0","@aws-sdk/util-endpoints":"3.391.0","@aws-sdk/util-user-agent-browser":"3.391.0","@aws-sdk/util-user-agent-node":"3.391.0","@smithy/config-resolver":"^2.0.3","@smithy/fetch-http-handler":"^2.0.3","@smithy/hash-node":"^2.0.3","@smithy/invalid-dependency":"^2.0.3","@smithy/middleware-content-length":"^2.0.3","@smithy/middleware-endpoint":"^2.0.3","@smithy/middleware-retry":"^2.0.3","@smithy/middleware-serde":"^2.0.3","@smithy/middleware-stack":"^2.0.0","@smithy/node-config-provider":"^2.0.3","@smithy/node-http-handler":"^2.0.3","@smithy/protocol-http":"^2.0.3","@smithy/smithy-client":"^2.0.3","@smithy/types":"^2.2.0","@smithy/url-parser":"^2.0.3","@smithy/util-base64":"^2.0.0","@smithy/util-body-length-browser":"^2.0.0","@smithy/util-body-length-node":"^2.0.0","@smithy/util-defaults-mode-browser":"^2.0.3","@smithy/util-defaults-mode-node":"^2.0.3","@smithy/util-retry":"^2.0.0","@smithy/util-utf8":"^2.0.0","fast-xml-parser":"4.2.5","tslib":"^2.5.0"},"devDependencies":{"@smithy/service-client-documentation-generator":"^2.0.0","@tsconfig/node14":"1.0.3","@types/node":"^14.14.31","concurrently":"7.0.0","downlevel-dts":"0.10.1","rimraf":"3.0.2","typedoc":"0.23.23","typescript":"~4.9.5"},"engines":{"node":">=14.0.0"},"typesVersions":{"<4.0":{"dist-types/*":["dist-types/ts3.4/*"]}},"files":["dist-*/**"],"author":{"name":"AWS SDK for JavaScript Team","url":"https://aws.amazon.com/javascript/"},"license":"Apache-2.0","browser":{"./dist-es/runtimeConfig":"./dist-es/runtimeConfig.browser"},"react-native":{"./dist-es/runtimeConfig":"./dist-es/runtimeConfig.native"},"homepage":"https://github.com/aws/aws-sdk-js-v3/tree/main/clients/client-sts","repository":{"type":"git","url":"https://github.com/aws/aws-sdk-js-v3.git","directory":"clients/client-sts"}}'); + +/***/ }), + +/***/ 5367: +/***/ ((module) => { + +"use strict"; +module.exports = JSON.parse('{"partitions":[{"id":"aws","outputs":{"dnsSuffix":"amazonaws.com","dualStackDnsSuffix":"api.aws","name":"aws","supportsDualStack":true,"supportsFIPS":true},"regionRegex":"^(us|eu|ap|sa|ca|me|af|il)\\\\-\\\\w+\\\\-\\\\d+$","regions":{"af-south-1":{"description":"Africa (Cape Town)"},"ap-east-1":{"description":"Asia Pacific (Hong Kong)"},"ap-northeast-1":{"description":"Asia Pacific (Tokyo)"},"ap-northeast-2":{"description":"Asia Pacific 
(Seoul)"},"ap-northeast-3":{"description":"Asia Pacific (Osaka)"},"ap-south-1":{"description":"Asia Pacific (Mumbai)"},"ap-south-2":{"description":"Asia Pacific (Hyderabad)"},"ap-southeast-1":{"description":"Asia Pacific (Singapore)"},"ap-southeast-2":{"description":"Asia Pacific (Sydney)"},"ap-southeast-3":{"description":"Asia Pacific (Jakarta)"},"ap-southeast-4":{"description":"Asia Pacific (Melbourne)"},"aws-global":{"description":"AWS Standard global region"},"ca-central-1":{"description":"Canada (Central)"},"eu-central-1":{"description":"Europe (Frankfurt)"},"eu-central-2":{"description":"Europe (Zurich)"},"eu-north-1":{"description":"Europe (Stockholm)"},"eu-south-1":{"description":"Europe (Milan)"},"eu-south-2":{"description":"Europe (Spain)"},"eu-west-1":{"description":"Europe (Ireland)"},"eu-west-2":{"description":"Europe (London)"},"eu-west-3":{"description":"Europe (Paris)"},"il-central-1":{"description":"Israel (Tel Aviv)"},"me-central-1":{"description":"Middle East (UAE)"},"me-south-1":{"description":"Middle East (Bahrain)"},"sa-east-1":{"description":"South America (Sao Paulo)"},"us-east-1":{"description":"US East (N. Virginia)"},"us-east-2":{"description":"US East (Ohio)"},"us-west-1":{"description":"US West (N. California)"},"us-west-2":{"description":"US West (Oregon)"}}},{"id":"aws-cn","outputs":{"dnsSuffix":"amazonaws.com.cn","dualStackDnsSuffix":"api.amazonwebservices.com.cn","name":"aws-cn","supportsDualStack":true,"supportsFIPS":true},"regionRegex":"^cn\\\\-\\\\w+\\\\-\\\\d+$","regions":{"aws-cn-global":{"description":"AWS China global region"},"cn-north-1":{"description":"China (Beijing)"},"cn-northwest-1":{"description":"China (Ningxia)"}}},{"id":"aws-us-gov","outputs":{"dnsSuffix":"amazonaws.com","dualStackDnsSuffix":"api.aws","name":"aws-us-gov","supportsDualStack":true,"supportsFIPS":true},"regionRegex":"^us\\\\-gov\\\\-\\\\w+\\\\-\\\\d+$","regions":{"aws-us-gov-global":{"description":"AWS GovCloud (US) global region"},"us-gov-east-1":{"description":"AWS GovCloud (US-East)"},"us-gov-west-1":{"description":"AWS GovCloud (US-West)"}}},{"id":"aws-iso","outputs":{"dnsSuffix":"c2s.ic.gov","dualStackDnsSuffix":"c2s.ic.gov","name":"aws-iso","supportsDualStack":false,"supportsFIPS":true},"regionRegex":"^us\\\\-iso\\\\-\\\\w+\\\\-\\\\d+$","regions":{"aws-iso-global":{"description":"AWS ISO (US) global region"},"us-iso-east-1":{"description":"US ISO East"},"us-iso-west-1":{"description":"US ISO WEST"}}},{"id":"aws-iso-b","outputs":{"dnsSuffix":"sc2s.sgov.gov","dualStackDnsSuffix":"sc2s.sgov.gov","name":"aws-iso-b","supportsDualStack":false,"supportsFIPS":true},"regionRegex":"^us\\\\-isob\\\\-\\\\w+\\\\-\\\\d+$","regions":{"aws-iso-b-global":{"description":"AWS ISOB (US) global region"},"us-isob-east-1":{"description":"US ISOB East (Ohio)"}}},{"id":"aws-iso-e","outputs":{"dnsSuffix":"cloud.adc-e.uk","dualStackDnsSuffix":"cloud.adc-e.uk","name":"aws-iso-e","supportsDualStack":false,"supportsFIPS":true},"regionRegex":"^eu\\\\-isoe\\\\-\\\\w+\\\\-\\\\d+$","regions":{}},{"id":"aws-iso-f","outputs":{"dnsSuffix":"csp.hci.ic.gov","dualStackDnsSuffix":"csp.hci.ic.gov","name":"aws-iso-f","supportsDualStack":false,"supportsFIPS":true},"regionRegex":"^us\\\\-isof\\\\-\\\\w+\\\\-\\\\d+$","regions":{}}],"version":"1.1"}'); + /***/ }) /******/ }); @@ -2876,7 +21005,7 @@ module.exports = require("util"); /******/ // startup /******/ // Load entry module and return exports /******/ // This entry module is referenced by other modules so it can't be inlined -/******/ var __webpack_exports__ 
= __nccwpck_require__(722); +/******/ var __webpack_exports__ = __nccwpck_require__(7424); /******/ module.exports = __webpack_exports__; /******/ /******/ })() diff --git a/dist/cleanup/src/CredentialsClient.d.ts b/dist/cleanup/src/CredentialsClient.d.ts new file mode 100644 index 000000000..a8bf0d0e4 --- /dev/null +++ b/dist/cleanup/src/CredentialsClient.d.ts @@ -0,0 +1,14 @@ +import { STSClient } from '@aws-sdk/client-sts'; +export interface CredentialsClientProps { + region?: string; + proxyServer?: string; +} +export declare class CredentialsClient { + region?: string; + private _stsClient?; + private readonly requestHandler?; + constructor(props: CredentialsClientProps); + get stsClient(): STSClient; + validateCredentials(expectedAccessKeyId?: string, roleChaining?: boolean): Promise; + private loadCredentials; +} diff --git a/dist/cleanup/src/assumeRole.d.ts b/dist/cleanup/src/assumeRole.d.ts new file mode 100644 index 000000000..7ef038352 --- /dev/null +++ b/dist/cleanup/src/assumeRole.d.ts @@ -0,0 +1,15 @@ +import type { CredentialsClient } from './CredentialsClient'; +export interface assumeRoleParams { + credentialsClient: CredentialsClient; + roleToAssume: string; + roleDuration: number; + roleSessionName: string; + roleSkipSessionTagging?: boolean; + sourceAccountId?: string; + roleExternalId?: string; + webIdentityTokenFile?: string; + webIdentityToken?: string; + inlineSessionPolicy?: string; + managedSessionPolicies?: any[]; +} +export declare function assumeRole(params: assumeRoleParams): Promise; diff --git a/dist/cleanup/src/cleanup/index.d.ts b/dist/cleanup/src/cleanup/index.d.ts new file mode 100644 index 000000000..e2bed3015 --- /dev/null +++ b/dist/cleanup/src/cleanup/index.d.ts @@ -0,0 +1,11 @@ +/** + * When the GitHub Actions job is done, clean up any environment variables that + * may have been set by the configure-aws-credentials steps in the job. + * + * Environment variables are not intended to be shared across different jobs in + * the same GitHub Actions workflow: GitHub Actions documentation states that + * each job runs in a fresh instance. However, doing our own cleanup will + * give us additional assurance that these environment variables are not shared + * with any other jobs. 
+ */ +export declare function cleanup(): void; diff --git a/dist/cleanup/src/helpers.d.ts b/dist/cleanup/src/helpers.d.ts new file mode 100644 index 000000000..1df20f04b --- /dev/null +++ b/dist/cleanup/src/helpers.d.ts @@ -0,0 +1,16 @@ +import type { Credentials } from '@aws-sdk/client-sts'; +import type { CredentialsClient } from './CredentialsClient'; +export declare function exportCredentials(creds?: Partial, outputCredentials?: boolean): void; +export declare function unsetCredentials(): void; +export declare function exportRegion(region: string): void; +export declare function exportAccountId(credentialsClient: CredentialsClient, maskAccountId?: boolean): Promise; +export declare function sanitizeGitHubVariables(name: string): string; +export declare function defaultSleep(ms: number): Promise; +declare let sleep: typeof defaultSleep; +export declare function withsleep(s: typeof sleep): void; +export declare function reset(): void; +export declare function verifyKeys(creds: Partial | undefined): void; +export declare function retryAndBackoff(fn: () => Promise, isRetryable: boolean, maxRetries?: number, retries?: number, base?: number): Promise; +export declare function errorMessage(error: unknown): string; +export declare function isDefined(i: T | undefined | null): i is T; +export {}; diff --git a/dist/cleanup/src/index.d.ts b/dist/cleanup/src/index.d.ts new file mode 100644 index 000000000..1aeec715e --- /dev/null +++ b/dist/cleanup/src/index.d.ts @@ -0,0 +1 @@ +export declare function run(): Promise; diff --git a/dist/cleanup/test/cleanup.test.d.ts b/dist/cleanup/test/cleanup.test.d.ts new file mode 100644 index 000000000..cb0ff5c3b --- /dev/null +++ b/dist/cleanup/test/cleanup.test.d.ts @@ -0,0 +1 @@ +export {}; diff --git a/dist/cleanup/test/helpers.test.d.ts b/dist/cleanup/test/helpers.test.d.ts new file mode 100644 index 000000000..cb0ff5c3b --- /dev/null +++ b/dist/cleanup/test/helpers.test.d.ts @@ -0,0 +1 @@ +export {}; diff --git a/dist/cleanup/test/index.test.d.ts b/dist/cleanup/test/index.test.d.ts new file mode 100644 index 000000000..cb0ff5c3b --- /dev/null +++ b/dist/cleanup/test/index.test.d.ts @@ -0,0 +1 @@ +export {}; diff --git a/dist/index.js b/dist/index.js index f4539daeb..ca5eefa0d 100644 --- a/dist/index.js +++ b/dist/index.js @@ -1,6 +1,578 @@ /******/ (() => { // webpackBootstrap /******/ var __webpack_modules__ = ({ +/***/ 3301: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.CredentialsClient = void 0; +const core_1 = __nccwpck_require__(42186); +const client_sts_1 = __nccwpck_require__(52209); +const node_http_handler_1 = __nccwpck_require__(68805); +const https_proxy_agent_1 = __nccwpck_require__(77219); +const helpers_1 = __nccwpck_require__(49787); +const USER_AGENT = 'configure-aws-credentials-for-github-actions'; +class CredentialsClient { + constructor(props) { + this.region = props.region; + if (props.proxyServer) { + (0, core_1.info)('Configuring proxy handler for STS client'); + const handler = new https_proxy_agent_1.HttpsProxyAgent(props.proxyServer); + this.requestHandler = new node_http_handler_1.NodeHttpHandler({ + httpAgent: handler, + httpsAgent: handler, + }); + } + } + get stsClient() { + if (!this._stsClient) { + this._stsClient = new client_sts_1.STSClient({ + region: this.region, + customUserAgent: USER_AGENT, + requestHandler: this.requestHandler ? 
this.requestHandler : undefined, + }); + } + return this._stsClient; + } + async validateCredentials(expectedAccessKeyId, roleChaining) { + let credentials; + try { + credentials = await this.loadCredentials(); + if (!credentials.accessKeyId) { + throw new Error('Access key ID empty after loading credentials'); + } + } + catch (error) { + throw new Error(`Credentials could not be loaded, please check your action inputs: ${(0, helpers_1.errorMessage)(error)}`); + } + if (!roleChaining) { + const actualAccessKeyId = credentials.accessKeyId; + if (expectedAccessKeyId && expectedAccessKeyId !== actualAccessKeyId) { + throw new Error('Unexpected failure: Credentials loaded by the SDK do not match the access key ID configured by the action'); + } + } + } + async loadCredentials() { + const client = new client_sts_1.STSClient({ + requestHandler: this.requestHandler ? this.requestHandler : undefined, + }); + return client.config.credentials(); + } +} +exports.CredentialsClient = CredentialsClient; +//# sourceMappingURL=CredentialsClient.js.map + +/***/ }), + +/***/ 61209: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.assumeRole = void 0; +const assert_1 = __importDefault(__nccwpck_require__(39491)); +const fs_1 = __importDefault(__nccwpck_require__(57147)); +const path_1 = __importDefault(__nccwpck_require__(71017)); +const core = __importStar(__nccwpck_require__(42186)); +const client_sts_1 = __nccwpck_require__(52209); +const helpers_1 = __nccwpck_require__(49787); +async function assumeRoleWithOIDC(params, client, webIdentityToken) { + delete params.Tags; + core.info('Assuming role with OIDC'); + try { + const creds = await client.send(new client_sts_1.AssumeRoleWithWebIdentityCommand({ + ...params, + WebIdentityToken: webIdentityToken, + })); + (0, helpers_1.verifyKeys)(creds.Credentials); + return creds; + } + catch (error) { + throw new Error(`Could not assume role with OIDC: ${(0, helpers_1.errorMessage)(error)}`); + } +} +async function assumeRoleWithWebIdentityTokenFile(params, client, webIdentityTokenFile, workspace) { + core.debug('webIdentityTokenFile provided. Will call sts:AssumeRoleWithWebIdentity and take session tags from token contents.'); + const webIdentityTokenFilePath = path_1.default.isAbsolute(webIdentityTokenFile) + ? 
webIdentityTokenFile + : path_1.default.join(workspace, webIdentityTokenFile); + if (!fs_1.default.existsSync(webIdentityTokenFilePath)) { + throw new Error(`Web identity token file does not exist: ${webIdentityTokenFilePath}`); + } + core.info('Assuming role with web identity token file'); + try { + const webIdentityToken = fs_1.default.readFileSync(webIdentityTokenFilePath, 'utf8'); + delete params.Tags; + const creds = await client.send(new client_sts_1.AssumeRoleWithWebIdentityCommand({ + ...params, + WebIdentityToken: webIdentityToken, + })); + (0, helpers_1.verifyKeys)(creds.Credentials); + return creds; + } + catch (error) { + throw new Error(`Could not assume role with web identity token file: ${(0, helpers_1.errorMessage)(error)}`); + } +} +async function assumeRoleWithCredentials(params, client) { + core.info('Assuming role with user credentials'); + try { + const creds = await client.send(new client_sts_1.AssumeRoleCommand({ ...params })); + (0, helpers_1.verifyKeys)(creds.Credentials); + return creds; + } + catch (error) { + throw new Error(`Could not assume role with user credentials: ${(0, helpers_1.errorMessage)(error)}`); + } +} +async function assumeRole(params) { + const { credentialsClient, sourceAccountId, roleToAssume, roleExternalId, roleDuration, roleSessionName, roleSkipSessionTagging, webIdentityTokenFile, webIdentityToken, inlineSessionPolicy, managedSessionPolicies, } = { ...params }; + // Load GitHub environment variables + const { GITHUB_REPOSITORY, GITHUB_WORKFLOW, GITHUB_ACTION, GITHUB_ACTOR, GITHUB_SHA, GITHUB_WORKSPACE } = process.env; + if (!GITHUB_REPOSITORY || !GITHUB_WORKFLOW || !GITHUB_ACTION || !GITHUB_ACTOR || !GITHUB_SHA || !GITHUB_WORKSPACE) { + throw new Error('Missing required environment variables. Are you running in GitHub Actions?'); + } + // Load role session tags + const tagArray = [ + { Key: 'GitHub', Value: 'Actions' }, + { Key: 'Repository', Value: GITHUB_REPOSITORY }, + { Key: 'Workflow', Value: (0, helpers_1.sanitizeGitHubVariables)(GITHUB_WORKFLOW) }, + { Key: 'Action', Value: GITHUB_ACTION }, + { Key: 'Actor', Value: (0, helpers_1.sanitizeGitHubVariables)(GITHUB_ACTOR) }, + { Key: 'Commit', Value: GITHUB_SHA }, + ]; + if (process.env['GITHUB_REF']) { + tagArray.push({ Key: 'Branch', Value: (0, helpers_1.sanitizeGitHubVariables)(process.env['GITHUB_REF']) }); + } + const tags = roleSkipSessionTagging ? undefined : tagArray; + if (!tags) { + core.debug('Role session tagging has been skipped.'); + } + else { + core.debug(`${tags.length} role session tags are being used.`); + } + // Calculate role ARN from name and account ID (currently only supports `aws` partition) + let roleArn = roleToAssume; + if (!roleArn.startsWith('arn:aws')) { + (0, assert_1.default)((0, helpers_1.isDefined)(sourceAccountId), 'Source Account ID is needed if the Role Name is provided and not the Role Arn.'); + roleArn = `arn:aws:iam::${sourceAccountId}:role/${roleArn}`; + } + // Ready common parameters to assume role + const commonAssumeRoleParams = { + RoleArn: roleArn, + RoleSessionName: roleSessionName, + DurationSeconds: roleDuration, + Tags: tags ? tags : undefined, + ExternalId: roleExternalId ? roleExternalId : undefined, + Policy: inlineSessionPolicy ? inlineSessionPolicy : undefined, + PolicyArns: managedSessionPolicies?.length ? 
managedSessionPolicies : undefined, + }; + const keys = Object.keys(commonAssumeRoleParams); + keys.forEach((k) => commonAssumeRoleParams[k] === undefined && delete commonAssumeRoleParams[k]); + // Instantiate STS client + const stsClient = credentialsClient.stsClient; + // Assume role using one of three methods + switch (true) { + case !!webIdentityToken: { + return assumeRoleWithOIDC(commonAssumeRoleParams, stsClient, webIdentityToken); + } + case !!webIdentityTokenFile: { + return assumeRoleWithWebIdentityTokenFile(commonAssumeRoleParams, stsClient, webIdentityTokenFile, GITHUB_WORKSPACE); + } + default: { + return assumeRoleWithCredentials(commonAssumeRoleParams, stsClient); + } + } +} +exports.assumeRole = assumeRole; +//# sourceMappingURL=assumeRole.js.map + +/***/ }), + +/***/ 49787: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isDefined = exports.errorMessage = exports.retryAndBackoff = exports.verifyKeys = exports.reset = exports.withsleep = exports.defaultSleep = exports.sanitizeGitHubVariables = exports.exportAccountId = exports.exportRegion = exports.unsetCredentials = exports.exportCredentials = void 0; +const core = __importStar(__nccwpck_require__(42186)); +const client_sts_1 = __nccwpck_require__(52209); +const MAX_TAG_VALUE_LENGTH = 256; +const SANITIZATION_CHARACTER = '_'; +const SPECIAL_CHARS_REGEX = /[!@#$%^&*()_+\-=[\]{};':"\\|,.<>/?]+/; +// Configure the AWS CLI and AWS SDKs using environment variables and set them as secrets. 
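+// (Illustrative note, not part of the upstream source: the variables written below are
+// AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY and AWS_SESSION_TOKEN, so a later workflow step can,
+// for example, run `aws sts get-caller-identity` or use any AWS SDK without further credential
+// configuration.)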
+// Setting the credentials as secrets masks them in Github Actions logs +function exportCredentials(creds, outputCredentials) { + if (creds?.AccessKeyId) { + core.setSecret(creds.AccessKeyId); + core.exportVariable('AWS_ACCESS_KEY_ID', creds.AccessKeyId); + } + if (creds?.SecretAccessKey) { + core.setSecret(creds.SecretAccessKey); + core.exportVariable('AWS_SECRET_ACCESS_KEY', creds.SecretAccessKey); + } + if (creds?.SessionToken) { + core.setSecret(creds.SessionToken); + core.exportVariable('AWS_SESSION_TOKEN', creds.SessionToken); + } + else if (process.env['AWS_SESSION_TOKEN']) { + // clear session token from previous credentials action + core.exportVariable('AWS_SESSION_TOKEN', ''); + } + if (outputCredentials) { + if (creds?.AccessKeyId) { + core.setOutput('aws-access-key-id', creds.AccessKeyId); + } + if (creds?.SecretAccessKey) { + core.setOutput('aws-secret-access-key', creds.SecretAccessKey); + } + if (creds?.SessionToken) { + core.setOutput('aws-session-token', creds.SessionToken); + } + } +} +exports.exportCredentials = exportCredentials; +function unsetCredentials() { + core.exportVariable('AWS_ACCESS_KEY_ID', ''); + core.exportVariable('AWS_SECRET_ACCESS_KEY', ''); + core.exportVariable('AWS_SESSION_TOKEN', ''); + core.exportVariable('AWS_REGION', ''); + core.exportVariable('AWS_DEFAULT_REGION', ''); +} +exports.unsetCredentials = unsetCredentials; +function exportRegion(region) { + core.exportVariable('AWS_DEFAULT_REGION', region); + core.exportVariable('AWS_REGION', region); +} +exports.exportRegion = exportRegion; +// Obtains account ID from STS Client and sets it as output +async function exportAccountId(credentialsClient, maskAccountId) { + const client = credentialsClient.stsClient; + const identity = await client.send(new client_sts_1.GetCallerIdentityCommand({})); + const accountId = identity.Account; + if (!accountId) { + throw new Error('Could not get Account ID from STS. Did you set credentials?'); + } + if (maskAccountId) { + core.setSecret(accountId); + } + core.setOutput('aws-account-id', accountId); + return accountId; +} +exports.exportAccountId = exportAccountId; +// Tags have a more restrictive set of acceptable characters than GitHub environment variables can. +// This replaces anything not conforming to the tag restrictions by inverting the regular expression. +// See the AWS documentation for constraint specifics https://docs.aws.amazon.com/STS/latest/APIReference/API_Tag.html. 
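+// Illustration with a hypothetical input value: each character outside the allowed set
+// [\p{L}\p{Z}\p{N}_.:/=+\-@] is replaced with '_' and the result is truncated to 256 characters,
+// e.g. sanitizeGitHubVariables('Deploy "staging" & prod') returns 'Deploy _staging_ _ prod'.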
+function sanitizeGitHubVariables(name) { + const nameWithoutSpecialCharacters = name.replace(/[^\p{L}\p{Z}\p{N}_.:/=+\-@]/gu, SANITIZATION_CHARACTER); + const nameTruncated = nameWithoutSpecialCharacters.slice(0, MAX_TAG_VALUE_LENGTH); + return nameTruncated; +} +exports.sanitizeGitHubVariables = sanitizeGitHubVariables; +async function defaultSleep(ms) { + return new Promise((resolve) => setTimeout(resolve, ms)); +} +exports.defaultSleep = defaultSleep; +let sleep = defaultSleep; +function withsleep(s) { + sleep = s; +} +exports.withsleep = withsleep; +function reset() { + sleep = defaultSleep; +} +exports.reset = reset; +function verifyKeys(creds) { + if (!creds) { + return; + } + if (creds.AccessKeyId) { + if (SPECIAL_CHARS_REGEX.test(creds.AccessKeyId)) { + throw new Error('AccessKeyId contains special characters.'); + } + } + if (creds.SecretAccessKey) { + if (SPECIAL_CHARS_REGEX.test(creds.SecretAccessKey)) { + throw new Error('SecretAccessKey contains special characters.'); + } + } +} +exports.verifyKeys = verifyKeys; +// Retries the promise with exponential backoff if the error isRetryable up to maxRetries time. +async function retryAndBackoff(fn, isRetryable, maxRetries = 12, retries = 0, base = 50) { + try { + return await fn(); + } + catch (err) { + if (!isRetryable) { + throw err; + } + // It's retryable, so sleep and retry. + await sleep(Math.random() * (Math.pow(2, retries) * base)); + retries += 1; + if (retries >= maxRetries) { + throw err; + } + return await retryAndBackoff(fn, isRetryable, maxRetries, retries, base); + } +} +exports.retryAndBackoff = retryAndBackoff; +/* c8 ignore start */ +function errorMessage(error) { + return error instanceof Error ? error.message : String(error); +} +exports.errorMessage = errorMessage; +function isDefined(i) { + return i !== undefined && i !== null; +} +exports.isDefined = isDefined; +/* c8 ignore stop */ +//# sourceMappingURL=helpers.js.map + +/***/ }), + +/***/ 71667: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.run = void 0; +const core = __importStar(__nccwpck_require__(42186)); +const assumeRole_1 = __nccwpck_require__(61209); +const CredentialsClient_1 = __nccwpck_require__(3301); +const helpers_1 = __nccwpck_require__(49787); +const DEFAULT_ROLE_DURATION = 3600; // One hour (seconds) +const ROLE_SESSION_NAME = 'GitHubActions'; +const REGION_REGEX = /^[a-z0-9-]+$/g; +async function run() { + try { + // Get inputs + const AccessKeyId = core.getInput('aws-access-key-id', { required: false }); + const SecretAccessKey = core.getInput('aws-secret-access-key', { required: false }); + const sessionTokenInput = core.getInput('aws-session-token', { required: false }); + const SessionToken = sessionTokenInput === '' ? undefined : sessionTokenInput; + const region = core.getInput('aws-region', { required: true }); + const roleToAssume = core.getInput('role-to-assume', { required: false }); + const audience = core.getInput('audience', { required: false }); + const maskAccountIdInput = core.getInput('mask-aws-account-id', { required: false }) || 'false'; + const maskAccountId = maskAccountIdInput.toLowerCase() === 'true'; + const roleExternalId = core.getInput('role-external-id', { required: false }); + const webIdentityTokenFile = core.getInput('web-identity-token-file', { required: false }); + const roleDuration = parseInt(core.getInput('role-duration-seconds', { required: false })) || DEFAULT_ROLE_DURATION; + const roleSessionName = core.getInput('role-session-name', { required: false }) || ROLE_SESSION_NAME; + const roleSkipSessionTaggingInput = core.getInput('role-skip-session-tagging', { required: false }) || 'false'; + const roleSkipSessionTagging = roleSkipSessionTaggingInput.toLowerCase() === 'true'; + const proxyServer = core.getInput('http-proxy', { required: false }); + const inlineSessionPolicy = core.getInput('inline-session-policy', { required: false }); + const managedSessionPoliciesInput = core.getMultilineInput('managed-session-policies', { required: false }); + const managedSessionPolicies = []; + const roleChainingInput = core.getInput('role-chaining', { required: false }) || 'false'; + const roleChaining = roleChainingInput.toLowerCase() === 'true'; + const outputCredentialsInput = core.getInput('output-credentials', { required: false }) || 'false'; + const outputCredentials = outputCredentialsInput.toLowerCase() === 'true'; + const unsetCurrentCredentialsInput = core.getInput('unset-current-credentials', { required: false }) || 'false'; + const unsetCurrentCredentials = unsetCurrentCredentialsInput.toLowerCase() === 'true'; + const disableRetryInput = core.getInput('disable-retry', { required: false }) || 'false'; + const disableRetry = disableRetryInput.toLowerCase() === 'true'; + let maxRetries = parseInt(core.getInput('retry-max-attempts', { required: false })) || 12; + if (maxRetries < 1) { + maxRetries = 1; + } + for (const managedSessionPolicy of managedSessionPoliciesInput) { + managedSessionPolicies.push({ arn: managedSessionPolicy }); + } + // Logic 
to decide whether to attempt to use OIDC or not + const useGitHubOIDCProvider = () => { + // The `ACTIONS_ID_TOKEN_REQUEST_TOKEN` environment variable is set when the `id-token` permission is granted. + // This is necessary to authenticate with OIDC, but not strictly set just for OIDC. If it is not set and all other + // checks pass, it is likely but not guaranteed that the user needs but lacks this permission in their workflow. + // So, we will log a warning when it is the only piece absent + if (!!roleToAssume && + !webIdentityTokenFile && + !AccessKeyId && + !process.env['ACTIONS_ID_TOKEN_REQUEST_TOKEN'] && + !roleChaining) { + core.info('It looks like you might be trying to authenticate with OIDC. Did you mean to set the `id-token` permission?'); + } + return (!!roleToAssume && + !!process.env['ACTIONS_ID_TOKEN_REQUEST_TOKEN'] && + !AccessKeyId && + !webIdentityTokenFile && + !roleChaining); + }; + if (unsetCurrentCredentials) { + (0, helpers_1.unsetCredentials)(); + } + if (!region.match(REGION_REGEX)) { + throw new Error(`Region is not valid: ${region}`); + } + (0, helpers_1.exportRegion)(region); + // Instantiate credentials client + const credentialsClient = new CredentialsClient_1.CredentialsClient({ region, proxyServer }); + let sourceAccountId; + let webIdentityToken; + // If OIDC is being used, generate token + // Else, export credentials provided as input + if (useGitHubOIDCProvider()) { + try { + webIdentityToken = await (0, helpers_1.retryAndBackoff)(async () => { + return core.getIDToken(audience); + }, !disableRetry, maxRetries); + } + catch (error) { + throw new Error(`getIDToken call failed: ${(0, helpers_1.errorMessage)(error)}`); + } + } + else if (AccessKeyId) { + if (!SecretAccessKey) { + throw new Error("'aws-secret-access-key' must be provided if 'aws-access-key-id' is provided"); + } + // The STS client for calling AssumeRole pulls creds from the environment. + // Plus, in the assume role case, if the AssumeRole call fails, we want + // the source credentials to already be masked as secrets + // in any error messages. + (0, helpers_1.exportCredentials)({ AccessKeyId, SecretAccessKey, SessionToken }); + } + else if (!webIdentityTokenFile && !roleChaining) { + throw new Error('Could not determine how to assume credentials. Please check your inputs and try again.'); + } + if (AccessKeyId || roleChaining) { + // Validate that the SDK can actually pick up credentials. + // This validates cases where this action is using existing environment credentials, + // and cases where the user intended to provide input credentials but the secrets inputs resolved to empty strings. + await credentialsClient.validateCredentials(AccessKeyId, roleChaining); + sourceAccountId = await (0, helpers_1.exportAccountId)(credentialsClient, maskAccountId); + } + // Get role credentials if configured to do so + if (roleToAssume) { + const roleCredentials = await (0, helpers_1.retryAndBackoff)(async () => { + return (0, assumeRole_1.assumeRole)({ + credentialsClient, + sourceAccountId, + roleToAssume, + roleExternalId, + roleDuration, + roleSessionName, + roleSkipSessionTagging, + webIdentityTokenFile, + webIdentityToken, + inlineSessionPolicy, + managedSessionPolicies, + }); + }, !disableRetry, maxRetries); + core.info(`Authenticated as assumedRoleId ${roleCredentials.AssumedRoleUser.AssumedRoleId}`); + (0, helpers_1.exportCredentials)(roleCredentials.Credentials, outputCredentials); + // We need to validate the credentials in 2 of our use-cases + // First: self-hosted runners. 
If the GITHUB_ACTIONS environment variable + // is set to `true` then we are NOT in a self-hosted runner. + // Second: Customer provided credentials manually (IAM User keys stored in GH Secrets) + if (!process.env['GITHUB_ACTIONS'] || AccessKeyId) { + await credentialsClient.validateCredentials(roleCredentials.Credentials?.AccessKeyId); + } + await (0, helpers_1.exportAccountId)(credentialsClient, maskAccountId); + } + else { + core.info('Proceeding with IAM user credentials'); + } + } + catch (error) { + core.setFailed((0, helpers_1.errorMessage)(error)); + const showStackTrace = process.env['SHOW_STACK_TRACE']; + if (showStackTrace === 'true') { + throw error; + } + } +} +exports.run = run; +/* c8 ignore start */ +if (require.main === require.cache[eval('__filename')]) { + (async () => { + await run(); + })().catch((error) => { + core.setFailed((0, helpers_1.errorMessage)(error)); + }); +} +//# sourceMappingURL=index.js.map + +/***/ }), + /***/ 87351: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { @@ -1211,6 +1783,19 @@ class HttpClientResponse { })); }); } + readBodyBuffer() { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { + const chunks = []; + this.message.on('data', (chunk) => { + chunks.push(chunk); + }); + this.message.on('end', () => { + resolve(Buffer.concat(chunks)); + }); + })); + }); + } } exports.HttpClientResponse = HttpClientResponse; function isHttps(requestUrl) { @@ -1715,7 +2300,13 @@ function getProxyUrl(reqUrl) { } })(); if (proxyVar) { - return new URL(proxyVar); + try { + return new URL(proxyVar); + } + catch (_a) { + if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://')) + return new URL(`http://${proxyVar}`); + } } else { return undefined; @@ -1777,47382 +2368,22088 @@ function isLoopbackAddress(host) { /***/ }), -/***/ 49690: -/***/ (function(module, __unused_webpack_exports, __nccwpck_require__) { +/***/ 32374: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -const events_1 = __nccwpck_require__(82361); -const debug_1 = __importDefault(__nccwpck_require__(38237)); -const promisify_1 = __importDefault(__nccwpck_require__(66570)); -const debug = debug_1.default('agent-base'); -function isAgent(v) { - return Boolean(v) && typeof v.addRequest === 'function'; -} -function isSecureEndpoint() { - const { stack } = new Error(); - if (typeof stack !== 'string') - return false; - return stack.split('\n').some(l => l.indexOf('(https.js:') !== -1 || l.indexOf('node:https:') !== -1); -} -function createAgent(callback, opts) { - return new createAgent.Agent(callback, opts); -} -(function (createAgent) { - /** - * Base `http.Agent` implementation. - * No pooling/keep-alive is implemented by default. - * - * @param {Function} callback - * @api public - */ - class Agent extends events_1.EventEmitter { - constructor(callback, _opts) { - super(); - let opts = _opts; - if (typeof callback === 'function') { - this.callback = callback; +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AwsCrc32 = void 0; +var tslib_1 = __nccwpck_require__(5066); +var util_1 = __nccwpck_require__(41236); +var index_1 = __nccwpck_require__(47327); +var AwsCrc32 = /** @class */ (function () { + function AwsCrc32() { + this.crc32 = new index_1.Crc32(); + } + AwsCrc32.prototype.update = function (toHash) { + if ((0, util_1.isEmptyData)(toHash)) + return; + this.crc32.update((0, util_1.convertToBuffer)(toHash)); + }; + AwsCrc32.prototype.digest = function () { + return tslib_1.__awaiter(this, void 0, void 0, function () { + return tslib_1.__generator(this, function (_a) { + return [2 /*return*/, (0, util_1.numToUint8)(this.crc32.digest())]; + }); + }); + }; + AwsCrc32.prototype.reset = function () { + this.crc32 = new index_1.Crc32(); + }; + return AwsCrc32; +}()); +exports.AwsCrc32 = AwsCrc32; +//# sourceMappingURL=aws_crc32.js.map + +/***/ }), + +/***/ 47327: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AwsCrc32 = exports.Crc32 = exports.crc32 = void 0; +var tslib_1 = __nccwpck_require__(5066); +var util_1 = __nccwpck_require__(41236); +function crc32(data) { + return new Crc32().update(data).digest(); +} +exports.crc32 = crc32; +var Crc32 = /** @class */ (function () { + function Crc32() { + this.checksum = 0xffffffff; + } + Crc32.prototype.update = function (data) { + var e_1, _a; + try { + for (var data_1 = tslib_1.__values(data), data_1_1 = data_1.next(); !data_1_1.done; data_1_1 = data_1.next()) { + var byte = data_1_1.value; + this.checksum = + (this.checksum >>> 8) ^ lookupTable[(this.checksum ^ byte) & 0xff]; } - else if (callback) { - opts = callback; - } - // Timeout for the socket to be returned from the callback - this.timeout = null; - if (opts && typeof opts.timeout === 'number') { - this.timeout = opts.timeout; - } - // These aren't actually used by `agent-base`, but are required - // for the TypeScript definition files in `@types/node` :/ - this.maxFreeSockets = 1; - this.maxSockets = 1; - this.maxTotalSockets = Infinity; - this.sockets = {}; - this.freeSockets = {}; - this.requests = {}; - this.options = {}; - } - get defaultPort() { - if (typeof this.explicitDefaultPort === 'number') { - return this.explicitDefaultPort; - } - return isSecureEndpoint() ? 443 : 80; - } - set defaultPort(v) { - this.explicitDefaultPort = v; } - get protocol() { - if (typeof this.explicitProtocol === 'string') { - return this.explicitProtocol; + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (data_1_1 && !data_1_1.done && (_a = data_1.return)) _a.call(data_1); } - return isSecureEndpoint() ? 'https:' : 'http:'; - } - set protocol(v) { - this.explicitProtocol = v; - } - callback(req, opts, fn) { - throw new Error('"agent-base" has no default implementation, you must subclass and override `callback()`'); + finally { if (e_1) throw e_1.error; } } - /** - * Called by node-core's "_http_client.js" module when creating - * a new HTTP request with this Agent instance. - * - * @api public - */ - addRequest(req, _opts) { - const opts = Object.assign({}, _opts); - if (typeof opts.secureEndpoint !== 'boolean') { - opts.secureEndpoint = isSecureEndpoint(); - } - if (opts.host == null) { - opts.host = 'localhost'; - } - if (opts.port == null) { - opts.port = opts.secureEndpoint ? 
443 : 80; - } - if (opts.protocol == null) { - opts.protocol = opts.secureEndpoint ? 'https:' : 'http:'; - } - if (opts.host && opts.path) { - // If both a `host` and `path` are specified then it's most - // likely the result of a `url.parse()` call... we need to - // remove the `path` portion so that `net.connect()` doesn't - // attempt to open that as a unix socket file. - delete opts.path; - } - delete opts.agent; - delete opts.hostname; - delete opts._defaultAgent; - delete opts.defaultPort; - delete opts.createConnection; - // Hint to use "Connection: close" - // XXX: non-documented `http` module API :( - req._last = true; - req.shouldKeepAlive = false; - let timedOut = false; - let timeoutId = null; - const timeoutMs = opts.timeout || this.timeout; - const onerror = (err) => { - if (req._hadError) - return; - req.emit('error', err); - // For Safety. Some additional errors might fire later on - // and we need to make sure we don't double-fire the error event. - req._hadError = true; - }; - const ontimeout = () => { - timeoutId = null; - timedOut = true; - const err = new Error(`A "socket" was not created for HTTP request before ${timeoutMs}ms`); - err.code = 'ETIMEOUT'; - onerror(err); - }; - const callbackError = (err) => { - if (timedOut) - return; - if (timeoutId !== null) { - clearTimeout(timeoutId); - timeoutId = null; - } - onerror(err); - }; - const onsocket = (socket) => { - if (timedOut) - return; - if (timeoutId != null) { - clearTimeout(timeoutId); - timeoutId = null; - } - if (isAgent(socket)) { - // `socket` is actually an `http.Agent` instance, so - // relinquish responsibility for this `req` to the Agent - // from here on - debug('Callback returned another Agent instance %o', socket.constructor.name); - socket.addRequest(req, opts); - return; - } - if (socket) { - socket.once('free', () => { - this.freeSocket(socket, opts); - }); - req.onSocket(socket); - return; - } - const err = new Error(`no Duplex stream was returned to agent-base for \`${req.method} ${req.path}\``); - onerror(err); - }; - if (typeof this.callback !== 'function') { - onerror(new Error('`callback` is not defined')); - return; - } - if (!this.promisifiedCallback) { - if (this.callback.length >= 3) { - debug('Converting legacy callback function to promise'); - this.promisifiedCallback = promisify_1.default(this.callback); - } - else { - this.promisifiedCallback = this.callback; - } - } - if (typeof timeoutMs === 'number' && timeoutMs > 0) { - timeoutId = setTimeout(ontimeout, timeoutMs); - } - if ('port' in opts && typeof opts.port !== 'number') { - opts.port = Number(opts.port); - } - try { - debug('Resolving socket for %o request: %o', opts.protocol, `${req.method} ${req.path}`); - Promise.resolve(this.promisifiedCallback(req, opts)).then(onsocket, callbackError); + return this; + }; + Crc32.prototype.digest = function () { + return (this.checksum ^ 0xffffffff) >>> 0; + }; + return Crc32; +}()); +exports.Crc32 = Crc32; +// prettier-ignore +var a_lookUpTable = [ + 0x00000000, 0x77073096, 0xEE0E612C, 0x990951BA, + 0x076DC419, 0x706AF48F, 0xE963A535, 0x9E6495A3, + 0x0EDB8832, 0x79DCB8A4, 0xE0D5E91E, 0x97D2D988, + 0x09B64C2B, 0x7EB17CBD, 0xE7B82D07, 0x90BF1D91, + 0x1DB71064, 0x6AB020F2, 0xF3B97148, 0x84BE41DE, + 0x1ADAD47D, 0x6DDDE4EB, 0xF4D4B551, 0x83D385C7, + 0x136C9856, 0x646BA8C0, 0xFD62F97A, 0x8A65C9EC, + 0x14015C4F, 0x63066CD9, 0xFA0F3D63, 0x8D080DF5, + 0x3B6E20C8, 0x4C69105E, 0xD56041E4, 0xA2677172, + 0x3C03E4D1, 0x4B04D447, 0xD20D85FD, 0xA50AB56B, + 0x35B5A8FA, 0x42B2986C, 0xDBBBC9D6, 0xACBCF940, 
+ 0x32D86CE3, 0x45DF5C75, 0xDCD60DCF, 0xABD13D59, + 0x26D930AC, 0x51DE003A, 0xC8D75180, 0xBFD06116, + 0x21B4F4B5, 0x56B3C423, 0xCFBA9599, 0xB8BDA50F, + 0x2802B89E, 0x5F058808, 0xC60CD9B2, 0xB10BE924, + 0x2F6F7C87, 0x58684C11, 0xC1611DAB, 0xB6662D3D, + 0x76DC4190, 0x01DB7106, 0x98D220BC, 0xEFD5102A, + 0x71B18589, 0x06B6B51F, 0x9FBFE4A5, 0xE8B8D433, + 0x7807C9A2, 0x0F00F934, 0x9609A88E, 0xE10E9818, + 0x7F6A0DBB, 0x086D3D2D, 0x91646C97, 0xE6635C01, + 0x6B6B51F4, 0x1C6C6162, 0x856530D8, 0xF262004E, + 0x6C0695ED, 0x1B01A57B, 0x8208F4C1, 0xF50FC457, + 0x65B0D9C6, 0x12B7E950, 0x8BBEB8EA, 0xFCB9887C, + 0x62DD1DDF, 0x15DA2D49, 0x8CD37CF3, 0xFBD44C65, + 0x4DB26158, 0x3AB551CE, 0xA3BC0074, 0xD4BB30E2, + 0x4ADFA541, 0x3DD895D7, 0xA4D1C46D, 0xD3D6F4FB, + 0x4369E96A, 0x346ED9FC, 0xAD678846, 0xDA60B8D0, + 0x44042D73, 0x33031DE5, 0xAA0A4C5F, 0xDD0D7CC9, + 0x5005713C, 0x270241AA, 0xBE0B1010, 0xC90C2086, + 0x5768B525, 0x206F85B3, 0xB966D409, 0xCE61E49F, + 0x5EDEF90E, 0x29D9C998, 0xB0D09822, 0xC7D7A8B4, + 0x59B33D17, 0x2EB40D81, 0xB7BD5C3B, 0xC0BA6CAD, + 0xEDB88320, 0x9ABFB3B6, 0x03B6E20C, 0x74B1D29A, + 0xEAD54739, 0x9DD277AF, 0x04DB2615, 0x73DC1683, + 0xE3630B12, 0x94643B84, 0x0D6D6A3E, 0x7A6A5AA8, + 0xE40ECF0B, 0x9309FF9D, 0x0A00AE27, 0x7D079EB1, + 0xF00F9344, 0x8708A3D2, 0x1E01F268, 0x6906C2FE, + 0xF762575D, 0x806567CB, 0x196C3671, 0x6E6B06E7, + 0xFED41B76, 0x89D32BE0, 0x10DA7A5A, 0x67DD4ACC, + 0xF9B9DF6F, 0x8EBEEFF9, 0x17B7BE43, 0x60B08ED5, + 0xD6D6A3E8, 0xA1D1937E, 0x38D8C2C4, 0x4FDFF252, + 0xD1BB67F1, 0xA6BC5767, 0x3FB506DD, 0x48B2364B, + 0xD80D2BDA, 0xAF0A1B4C, 0x36034AF6, 0x41047A60, + 0xDF60EFC3, 0xA867DF55, 0x316E8EEF, 0x4669BE79, + 0xCB61B38C, 0xBC66831A, 0x256FD2A0, 0x5268E236, + 0xCC0C7795, 0xBB0B4703, 0x220216B9, 0x5505262F, + 0xC5BA3BBE, 0xB2BD0B28, 0x2BB45A92, 0x5CB36A04, + 0xC2D7FFA7, 0xB5D0CF31, 0x2CD99E8B, 0x5BDEAE1D, + 0x9B64C2B0, 0xEC63F226, 0x756AA39C, 0x026D930A, + 0x9C0906A9, 0xEB0E363F, 0x72076785, 0x05005713, + 0x95BF4A82, 0xE2B87A14, 0x7BB12BAE, 0x0CB61B38, + 0x92D28E9B, 0xE5D5BE0D, 0x7CDCEFB7, 0x0BDBDF21, + 0x86D3D2D4, 0xF1D4E242, 0x68DDB3F8, 0x1FDA836E, + 0x81BE16CD, 0xF6B9265B, 0x6FB077E1, 0x18B74777, + 0x88085AE6, 0xFF0F6A70, 0x66063BCA, 0x11010B5C, + 0x8F659EFF, 0xF862AE69, 0x616BFFD3, 0x166CCF45, + 0xA00AE278, 0xD70DD2EE, 0x4E048354, 0x3903B3C2, + 0xA7672661, 0xD06016F7, 0x4969474D, 0x3E6E77DB, + 0xAED16A4A, 0xD9D65ADC, 0x40DF0B66, 0x37D83BF0, + 0xA9BCAE53, 0xDEBB9EC5, 0x47B2CF7F, 0x30B5FFE9, + 0xBDBDF21C, 0xCABAC28A, 0x53B39330, 0x24B4A3A6, + 0xBAD03605, 0xCDD70693, 0x54DE5729, 0x23D967BF, + 0xB3667A2E, 0xC4614AB8, 0x5D681B02, 0x2A6F2B94, + 0xB40BBE37, 0xC30C8EA1, 0x5A05DF1B, 0x2D02EF8D, +]; +var lookupTable = (0, util_1.uint32ArrayFrom)(a_lookUpTable); +var aws_crc32_1 = __nccwpck_require__(32374); +Object.defineProperty(exports, "AwsCrc32", ({ enumerable: true, get: function () { return aws_crc32_1.AwsCrc32; } })); +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 5066: +/***/ ((module) => { + +/*! ***************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ + +/* global global, define, System, Reflect, Promise */ +var __extends; +var __assign; +var __rest; +var __decorate; +var __param; +var __metadata; +var __awaiter; +var __generator; +var __exportStar; +var __values; +var __read; +var __spread; +var __spreadArrays; +var __await; +var __asyncGenerator; +var __asyncDelegator; +var __asyncValues; +var __makeTemplateObject; +var __importStar; +var __importDefault; +var __classPrivateFieldGet; +var __classPrivateFieldSet; +var __createBinding; +(function (factory) { + var root = typeof global === "object" ? global : typeof self === "object" ? self : typeof this === "object" ? this : {}; + if (typeof define === "function" && define.amd) { + define("tslib", ["exports"], function (exports) { factory(createExporter(root, createExporter(exports))); }); + } + else if ( true && typeof module.exports === "object") { + factory(createExporter(root, createExporter(module.exports))); + } + else { + factory(createExporter(root)); + } + function createExporter(exports, previous) { + if (exports !== root) { + if (typeof Object.create === "function") { + Object.defineProperty(exports, "__esModule", { value: true }); } - catch (err) { - Promise.reject(err).catch(callbackError); + else { + exports.__esModule = true; } } - freeSocket(socket, opts) { - debug('Freeing socket %o %o', socket.constructor.name, opts); - socket.destroy(); - } - destroy() { - debug('Destroying agent %o', this.constructor.name); - } + return function (id, v) { return exports[id] = previous ? previous(id, v) : v; }; } - createAgent.Agent = Agent; - // So that `instanceof` works correctly - createAgent.prototype = createAgent.Agent.prototype; -})(createAgent || (createAgent = {})); -module.exports = createAgent; -//# sourceMappingURL=index.js.map +}) +(function (exporter) { + var extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; -/***/ }), + __extends = function (d, b) { + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? 
Object.create(b) : (__.prototype = b.prototype, new __()); + }; -/***/ 66570: -/***/ ((__unused_webpack_module, exports) => { + __assign = Object.assign || function (t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + }; -"use strict"; + __rest = function (s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; + }; -Object.defineProperty(exports, "__esModule", ({ value: true })); -function promisify(fn) { - return function (req, opts) { - return new Promise((resolve, reject) => { - fn.call(this, req, opts, (err, rtn) => { - if (err) { - reject(err); - } - else { - resolve(rtn); - } - }); - }); + __decorate = function (decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; }; -} -exports["default"] = promisify; -//# sourceMappingURL=promisify.js.map -/***/ }), + __param = function (paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } + }; -/***/ 20940: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + __metadata = function (metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); + }; -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['accessanalyzer'] = {}; -AWS.AccessAnalyzer = Service.defineService('accessanalyzer', ['2019-11-01']); -Object.defineProperty(apiLoader.services['accessanalyzer'], '2019-11-01', { - get: function get() { - var model = __nccwpck_require__(30590); - model.paginators = (__nccwpck_require__(63080)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); + __awaiter = function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; -module.exports = AWS.AccessAnalyzer; + __generator = function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (_) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } + }; + __createBinding = function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; + }; -/***/ }), + __exportStar = function (m, exports) { + for (var p in m) if (p !== "default" && !exports.hasOwnProperty(p)) exports[p] = m[p]; + }; -/***/ 32400: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + __values = function (o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." 
: "Symbol.iterator is not defined."); + }; -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['account'] = {}; -AWS.Account = Service.defineService('account', ['2021-02-01']); -Object.defineProperty(apiLoader.services['account'], '2021-02-01', { - get: function get() { - var model = __nccwpck_require__(36713); - model.paginators = (__nccwpck_require__(52324)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); + __read = function (o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; + }; -module.exports = AWS.Account; + __spread = function () { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; + }; + __spreadArrays = function () { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; + }; -/***/ }), + __await = function (v) { + return this instanceof __await ? (this.v = v, this) : new __await(v); + }; -/***/ 30838: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + __asyncGenerator = function (thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } + }; -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['acm'] = {}; -AWS.ACM = Service.defineService('acm', ['2015-12-08']); -Object.defineProperty(apiLoader.services['acm'], '2015-12-08', { - get: function get() { - var model = __nccwpck_require__(34662); - model.paginators = (__nccwpck_require__(42680)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(85678)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); + __asyncDelegator = function (o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? 
f(v) : v; } : f; } + }; + + __asyncValues = function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } + }; -module.exports = AWS.ACM; + __makeTemplateObject = function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; + }; + __importStar = function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; + }; -/***/ }), + __importDefault = function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; + }; -/***/ 18450: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + __classPrivateFieldGet = function (receiver, privateMap) { + if (!privateMap.has(receiver)) { + throw new TypeError("attempted to get private field on non-instance"); + } + return privateMap.get(receiver); + }; -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['acmpca'] = {}; -AWS.ACMPCA = Service.defineService('acmpca', ['2017-08-22']); -Object.defineProperty(apiLoader.services['acmpca'], '2017-08-22', { - get: function get() { - var model = __nccwpck_require__(33004); - model.paginators = (__nccwpck_require__(21209)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(89217)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); + __classPrivateFieldSet = function (receiver, privateMap, value) { + if (!privateMap.has(receiver)) { + throw new TypeError("attempted to set private field on non-instance"); + } + privateMap.set(receiver, value); + return value; + }; -module.exports = AWS.ACMPCA; + exporter("__extends", __extends); + exporter("__assign", __assign); + exporter("__rest", __rest); + exporter("__decorate", __decorate); + exporter("__param", __param); + exporter("__metadata", __metadata); + exporter("__awaiter", __awaiter); + exporter("__generator", __generator); + exporter("__exportStar", __exportStar); + exporter("__createBinding", __createBinding); + exporter("__values", __values); + exporter("__read", __read); + exporter("__spread", __spread); + exporter("__spreadArrays", __spreadArrays); + exporter("__await", __await); + exporter("__asyncGenerator", __asyncGenerator); + exporter("__asyncDelegator", __asyncDelegator); + exporter("__asyncValues", __asyncValues); + exporter("__makeTemplateObject", __makeTemplateObject); + exporter("__importStar", __importStar); + exporter("__importDefault", __importDefault); + exporter("__classPrivateFieldGet", __classPrivateFieldGet); + exporter("__classPrivateFieldSet", __classPrivateFieldSet); +}); /***/ }), -/***/ 14578: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 43228: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { 
-__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['alexaforbusiness'] = {}; -AWS.AlexaForBusiness = Service.defineService('alexaforbusiness', ['2017-11-09']); -Object.defineProperty(apiLoader.services['alexaforbusiness'], '2017-11-09', { - get: function get() { - var model = __nccwpck_require__(69786); - model.paginators = (__nccwpck_require__(21009)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); - -module.exports = AWS.AlexaForBusiness; +"use strict"; +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.convertToBuffer = void 0; +var util_utf8_browser_1 = __nccwpck_require__(28172); +// Quick polyfill +var fromUtf8 = typeof Buffer !== "undefined" && Buffer.from + ? function (input) { return Buffer.from(input, "utf8"); } + : util_utf8_browser_1.fromUtf8; +function convertToBuffer(data) { + // Already a Uint8, do nothing + if (data instanceof Uint8Array) + return data; + if (typeof data === "string") { + return fromUtf8(data); + } + if (ArrayBuffer.isView(data)) { + return new Uint8Array(data.buffer, data.byteOffset, data.byteLength / Uint8Array.BYTES_PER_ELEMENT); + } + return new Uint8Array(data); +} +exports.convertToBuffer = convertToBuffer; +//# sourceMappingURL=convertToBuffer.js.map /***/ }), -/***/ 26296: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -__nccwpck_require__(73639); -module.exports = { - ACM: __nccwpck_require__(30838), - APIGateway: __nccwpck_require__(91759), - ApplicationAutoScaling: __nccwpck_require__(25598), - AppStream: __nccwpck_require__(21730), - AutoScaling: __nccwpck_require__(31652), - Batch: __nccwpck_require__(10000), - Budgets: __nccwpck_require__(43923), - CloudDirectory: __nccwpck_require__(56231), - CloudFormation: __nccwpck_require__(74643), - CloudFront: __nccwpck_require__(48058), - CloudHSM: __nccwpck_require__(59976), - CloudSearch: __nccwpck_require__(72321), - CloudSearchDomain: __nccwpck_require__(64072), - CloudTrail: __nccwpck_require__(65512), - CloudWatch: __nccwpck_require__(6763), - CloudWatchEvents: __nccwpck_require__(38124), - CloudWatchLogs: __nccwpck_require__(96693), - CodeBuild: __nccwpck_require__(60450), - CodeCommit: __nccwpck_require__(71323), - CodeDeploy: __nccwpck_require__(54599), - CodePipeline: __nccwpck_require__(22938), - CognitoIdentity: __nccwpck_require__(58291), - CognitoIdentityServiceProvider: __nccwpck_require__(31379), - CognitoSync: __nccwpck_require__(74770), - ConfigService: __nccwpck_require__(34061), - CUR: __nccwpck_require__(5026), - DataPipeline: __nccwpck_require__(65688), - DeviceFarm: __nccwpck_require__(26272), - DirectConnect: __nccwpck_require__(73783), - DirectoryService: __nccwpck_require__(83908), - Discovery: __nccwpck_require__(81690), - DMS: __nccwpck_require__(69868), - DynamoDB: __nccwpck_require__(14347), - DynamoDBStreams: __nccwpck_require__(88090), - EC2: __nccwpck_require__(7778), - ECR: __nccwpck_require__(15211), - ECS: __nccwpck_require__(16615), - EFS: __nccwpck_require__(34375), - ElastiCache: __nccwpck_require__(81065), - ElasticBeanstalk: __nccwpck_require__(14897), - ELB: __nccwpck_require__(10907), - ELBv2: __nccwpck_require__(44311), - EMR: __nccwpck_require__(50470), - ES: __nccwpck_require__(84462), - ElasticTranscoder: __nccwpck_require__(40745), - Firehose: 
__nccwpck_require__(92831), - GameLift: __nccwpck_require__(8085), - Glacier: __nccwpck_require__(63249), - Health: __nccwpck_require__(21834), - IAM: __nccwpck_require__(50058), - ImportExport: __nccwpck_require__(6769), - Inspector: __nccwpck_require__(89439), - Iot: __nccwpck_require__(98392), - IotData: __nccwpck_require__(6564), - Kinesis: __nccwpck_require__(49876), - KinesisAnalytics: __nccwpck_require__(90042), - KMS: __nccwpck_require__(56782), - Lambda: __nccwpck_require__(13321), - LexRuntime: __nccwpck_require__(62716), - Lightsail: __nccwpck_require__(22718), - MachineLearning: __nccwpck_require__(82907), - MarketplaceCommerceAnalytics: __nccwpck_require__(4540), - MarketplaceMetering: __nccwpck_require__(39297), - MTurk: __nccwpck_require__(79954), - MobileAnalytics: __nccwpck_require__(66690), - OpsWorks: __nccwpck_require__(75691), - OpsWorksCM: __nccwpck_require__(80388), - Organizations: __nccwpck_require__(44670), - Pinpoint: __nccwpck_require__(18388), - Polly: __nccwpck_require__(97332), - RDS: __nccwpck_require__(71578), - Redshift: __nccwpck_require__(84853), - Rekognition: __nccwpck_require__(65470), - ResourceGroupsTaggingAPI: __nccwpck_require__(7385), - Route53: __nccwpck_require__(44968), - Route53Domains: __nccwpck_require__(51994), - S3: __nccwpck_require__(83256), - S3Control: __nccwpck_require__(99817), - ServiceCatalog: __nccwpck_require__(822), - SES: __nccwpck_require__(46816), - Shield: __nccwpck_require__(20271), - SimpleDB: __nccwpck_require__(10120), - SMS: __nccwpck_require__(57719), - Snowball: __nccwpck_require__(510), - SNS: __nccwpck_require__(28581), - SQS: __nccwpck_require__(63172), - SSM: __nccwpck_require__(83380), - StorageGateway: __nccwpck_require__(89190), - StepFunctions: __nccwpck_require__(8136), - STS: __nccwpck_require__(57513), - Support: __nccwpck_require__(1099), - SWF: __nccwpck_require__(32327), - XRay: __nccwpck_require__(41548), - WAF: __nccwpck_require__(72742), - WAFRegional: __nccwpck_require__(23153), - WorkDocs: __nccwpck_require__(38835), - WorkSpaces: __nccwpck_require__(25513), - CodeStar: __nccwpck_require__(98336), - LexModelBuildingService: __nccwpck_require__(37397), - MarketplaceEntitlementService: __nccwpck_require__(53707), - Athena: __nccwpck_require__(29434), - Greengrass: __nccwpck_require__(20690), - DAX: __nccwpck_require__(71398), - MigrationHub: __nccwpck_require__(14688), - CloudHSMV2: __nccwpck_require__(70889), - Glue: __nccwpck_require__(31658), - Mobile: __nccwpck_require__(39782), - Pricing: __nccwpck_require__(92765), - CostExplorer: __nccwpck_require__(79523), - MediaConvert: __nccwpck_require__(57220), - MediaLive: __nccwpck_require__(7509), - MediaPackage: __nccwpck_require__(91620), - MediaStore: __nccwpck_require__(83748), - MediaStoreData: __nccwpck_require__(98703), - AppSync: __nccwpck_require__(12402), - GuardDuty: __nccwpck_require__(40755), - MQ: __nccwpck_require__(23093), - Comprehend: __nccwpck_require__(62878), - IoTJobsDataPlane: __nccwpck_require__(42332), - KinesisVideoArchivedMedia: __nccwpck_require__(5580), - KinesisVideoMedia: __nccwpck_require__(81308), - KinesisVideo: __nccwpck_require__(89927), - SageMakerRuntime: __nccwpck_require__(85044), - SageMaker: __nccwpck_require__(77657), - Translate: __nccwpck_require__(72544), - ResourceGroups: __nccwpck_require__(58756), - AlexaForBusiness: __nccwpck_require__(14578), - Cloud9: __nccwpck_require__(85473), - ServerlessApplicationRepository: __nccwpck_require__(62402), - ServiceDiscovery: __nccwpck_require__(91569), - WorkMail: 
__nccwpck_require__(38374), - AutoScalingPlans: __nccwpck_require__(2554), - TranscribeService: __nccwpck_require__(75811), - Connect: __nccwpck_require__(13879), - ACMPCA: __nccwpck_require__(18450), - FMS: __nccwpck_require__(11316), - SecretsManager: __nccwpck_require__(85131), - IoTAnalytics: __nccwpck_require__(67409), - IoT1ClickDevicesService: __nccwpck_require__(39474), - IoT1ClickProjects: __nccwpck_require__(4686), - PI: __nccwpck_require__(15505), - Neptune: __nccwpck_require__(30047), - MediaTailor: __nccwpck_require__(99658), - EKS: __nccwpck_require__(23337), - Macie: __nccwpck_require__(86427), - DLM: __nccwpck_require__(24958), - Signer: __nccwpck_require__(71596), - Chime: __nccwpck_require__(84646), - PinpointEmail: __nccwpck_require__(83060), - RAM: __nccwpck_require__(94394), - Route53Resolver: __nccwpck_require__(25894), - PinpointSMSVoice: __nccwpck_require__(46605), - QuickSight: __nccwpck_require__(29898), - RDSDataService: __nccwpck_require__(30147), - Amplify: __nccwpck_require__(38090), - DataSync: __nccwpck_require__(25308), - RoboMaker: __nccwpck_require__(18068), - Transfer: __nccwpck_require__(51585), - GlobalAccelerator: __nccwpck_require__(19306), - ComprehendMedical: __nccwpck_require__(32349), - KinesisAnalyticsV2: __nccwpck_require__(74631), - MediaConnect: __nccwpck_require__(67639), - FSx: __nccwpck_require__(60642), - SecurityHub: __nccwpck_require__(21550), - AppMesh: __nccwpck_require__(69226), - LicenseManager: __nccwpck_require__(34693), - Kafka: __nccwpck_require__(56775), - ApiGatewayManagementApi: __nccwpck_require__(31762), - ApiGatewayV2: __nccwpck_require__(44987), - DocDB: __nccwpck_require__(55129), - Backup: __nccwpck_require__(82455), - WorkLink: __nccwpck_require__(48579), - Textract: __nccwpck_require__(58523), - ManagedBlockchain: __nccwpck_require__(85143), - MediaPackageVod: __nccwpck_require__(14962), - GroundStation: __nccwpck_require__(80494), - IoTThingsGraph: __nccwpck_require__(58905), - IoTEvents: __nccwpck_require__(88065), - IoTEventsData: __nccwpck_require__(56973), - Personalize: __nccwpck_require__(33696), - PersonalizeEvents: __nccwpck_require__(88170), - PersonalizeRuntime: __nccwpck_require__(66184), - ApplicationInsights: __nccwpck_require__(83972), - ServiceQuotas: __nccwpck_require__(57800), - EC2InstanceConnect: __nccwpck_require__(92209), - EventBridge: __nccwpck_require__(898), - LakeFormation: __nccwpck_require__(6726), - ForecastService: __nccwpck_require__(12942), - ForecastQueryService: __nccwpck_require__(36822), - QLDB: __nccwpck_require__(71266), - QLDBSession: __nccwpck_require__(55423), - WorkMailMessageFlow: __nccwpck_require__(67025), - CodeStarNotifications: __nccwpck_require__(15141), - SavingsPlans: __nccwpck_require__(62825), - SSO: __nccwpck_require__(71096), - SSOOIDC: __nccwpck_require__(49870), - MarketplaceCatalog: __nccwpck_require__(2609), - DataExchange: __nccwpck_require__(11024), - SESV2: __nccwpck_require__(20142), - MigrationHubConfig: __nccwpck_require__(62658), - ConnectParticipant: __nccwpck_require__(94198), - AppConfig: __nccwpck_require__(78606), - IoTSecureTunneling: __nccwpck_require__(98562), - WAFV2: __nccwpck_require__(50353), - ElasticInference: __nccwpck_require__(37708), - Imagebuilder: __nccwpck_require__(57511), - Schemas: __nccwpck_require__(55713), - AccessAnalyzer: __nccwpck_require__(20940), - CodeGuruReviewer: __nccwpck_require__(60070), - CodeGuruProfiler: __nccwpck_require__(65704), - ComputeOptimizer: __nccwpck_require__(64459), - FraudDetector: 
__nccwpck_require__(99830), - Kendra: __nccwpck_require__(66122), - NetworkManager: __nccwpck_require__(37610), - Outposts: __nccwpck_require__(27551), - AugmentedAIRuntime: __nccwpck_require__(33960), - EBS: __nccwpck_require__(62837), - KinesisVideoSignalingChannels: __nccwpck_require__(12710), - Detective: __nccwpck_require__(60674), - CodeStarconnections: __nccwpck_require__(78270), - Synthetics: __nccwpck_require__(25910), - IoTSiteWise: __nccwpck_require__(89690), - Macie2: __nccwpck_require__(57330), - CodeArtifact: __nccwpck_require__(91983), - Honeycode: __nccwpck_require__(38889), - IVS: __nccwpck_require__(67701), - Braket: __nccwpck_require__(35429), - IdentityStore: __nccwpck_require__(60222), - Appflow: __nccwpck_require__(60844), - RedshiftData: __nccwpck_require__(203), - SSOAdmin: __nccwpck_require__(66644), - TimestreamQuery: __nccwpck_require__(24529), - TimestreamWrite: __nccwpck_require__(1573), - S3Outposts: __nccwpck_require__(90493), - DataBrew: __nccwpck_require__(35846), - ServiceCatalogAppRegistry: __nccwpck_require__(79068), - NetworkFirewall: __nccwpck_require__(84626), - MWAA: __nccwpck_require__(32712), - AmplifyBackend: __nccwpck_require__(2806), - AppIntegrations: __nccwpck_require__(85479), - ConnectContactLens: __nccwpck_require__(41847), - DevOpsGuru: __nccwpck_require__(90673), - ECRPUBLIC: __nccwpck_require__(90244), - LookoutVision: __nccwpck_require__(65046), - SageMakerFeatureStoreRuntime: __nccwpck_require__(67644), - CustomerProfiles: __nccwpck_require__(28379), - AuditManager: __nccwpck_require__(20472), - EMRcontainers: __nccwpck_require__(49984), - HealthLake: __nccwpck_require__(64254), - SagemakerEdge: __nccwpck_require__(38966), - Amp: __nccwpck_require__(96881), - GreengrassV2: __nccwpck_require__(45126), - IotDeviceAdvisor: __nccwpck_require__(97569), - IoTFleetHub: __nccwpck_require__(42513), - IoTWireless: __nccwpck_require__(8226), - Location: __nccwpck_require__(44594), - WellArchitected: __nccwpck_require__(86263), - LexModelsV2: __nccwpck_require__(27254), - LexRuntimeV2: __nccwpck_require__(33855), - Fis: __nccwpck_require__(73003), - LookoutMetrics: __nccwpck_require__(78708), - Mgn: __nccwpck_require__(41339), - LookoutEquipment: __nccwpck_require__(21843), - Nimble: __nccwpck_require__(89428), - Finspace: __nccwpck_require__(3052), - Finspacedata: __nccwpck_require__(96869), - SSMContacts: __nccwpck_require__(12577), - SSMIncidents: __nccwpck_require__(20590), - ApplicationCostProfiler: __nccwpck_require__(20887), - AppRunner: __nccwpck_require__(75589), - Proton: __nccwpck_require__(9275), - Route53RecoveryCluster: __nccwpck_require__(35738), - Route53RecoveryControlConfig: __nccwpck_require__(16063), - Route53RecoveryReadiness: __nccwpck_require__(79106), - ChimeSDKIdentity: __nccwpck_require__(55975), - ChimeSDKMessaging: __nccwpck_require__(25255), - SnowDeviceManagement: __nccwpck_require__(64655), - MemoryDB: __nccwpck_require__(50782), - OpenSearch: __nccwpck_require__(60358), - KafkaConnect: __nccwpck_require__(61879), - VoiceID: __nccwpck_require__(28747), - Wisdom: __nccwpck_require__(85266), - Account: __nccwpck_require__(32400), - CloudControl: __nccwpck_require__(25630), - Grafana: __nccwpck_require__(51050), - Panorama: __nccwpck_require__(20368), - ChimeSDKMeetings: __nccwpck_require__(80788), - Resiliencehub: __nccwpck_require__(21173), - MigrationHubStrategy: __nccwpck_require__(96533), - AppConfigData: __nccwpck_require__(45282), - Drs: __nccwpck_require__(41116), - MigrationHubRefactorSpaces: 
__nccwpck_require__(2925), - Evidently: __nccwpck_require__(21440), - Inspector2: __nccwpck_require__(98650), - Rbin: __nccwpck_require__(70145), - RUM: __nccwpck_require__(53237), - BackupGateway: __nccwpck_require__(68277), - IoTTwinMaker: __nccwpck_require__(65010), - WorkSpacesWeb: __nccwpck_require__(94124), - AmplifyUIBuilder: __nccwpck_require__(89937), - Keyspaces: __nccwpck_require__(24789), - Billingconductor: __nccwpck_require__(38416), - GameSparks: __nccwpck_require__(83025), - PinpointSMSVoiceV2: __nccwpck_require__(478), - Ivschat: __nccwpck_require__(17077), - ChimeSDKMediaPipelines: __nccwpck_require__(18423), - EMRServerless: __nccwpck_require__(219), - M2: __nccwpck_require__(22482), - ConnectCampaigns: __nccwpck_require__(42789), - RedshiftServerless: __nccwpck_require__(29987), - RolesAnywhere: __nccwpck_require__(83604), - LicenseManagerUserSubscriptions: __nccwpck_require__(37725), - BackupStorage: __nccwpck_require__(82304), - PrivateNetworks: __nccwpck_require__(63088), - SupportApp: __nccwpck_require__(51288), - ControlTower: __nccwpck_require__(77574), - IoTFleetWise: __nccwpck_require__(94329), - MigrationHubOrchestrator: __nccwpck_require__(66120), - ConnectCases: __nccwpck_require__(72223), - ResourceExplorer2: __nccwpck_require__(74071), - Scheduler: __nccwpck_require__(94840), - ChimeSDKVoice: __nccwpck_require__(349), - IoTRoboRunner: __nccwpck_require__(22163), - SsmSap: __nccwpck_require__(44552), - OAM: __nccwpck_require__(9319), - ARCZonalShift: __nccwpck_require__(54280), - Omics: __nccwpck_require__(75114), - OpenSearchServerless: __nccwpck_require__(86277), - SecurityLake: __nccwpck_require__(84296), - SimSpaceWeaver: __nccwpck_require__(37090), - DocDBElastic: __nccwpck_require__(20792), - SageMakerGeospatial: __nccwpck_require__(4707), - CodeCatalyst: __nccwpck_require__(19499), - Pipes: __nccwpck_require__(14220), - SageMakerMetrics: __nccwpck_require__(28199), - KinesisVideoWebRTCStorage: __nccwpck_require__(52642), - LicenseManagerLinuxSubscriptions: __nccwpck_require__(52687), - KendraRanking: __nccwpck_require__(46255), - CleanRooms: __nccwpck_require__(15130), - CloudTrailData: __nccwpck_require__(31191), - Tnb: __nccwpck_require__(15300), - InternetMonitor: __nccwpck_require__(84099), - IVSRealTime: __nccwpck_require__(51946), - VPCLattice: __nccwpck_require__(78952), - OSIS: __nccwpck_require__(98021), - MediaPackageV2: __nccwpck_require__(53264), - PaymentCryptography: __nccwpck_require__(11594), - PaymentCryptographyData: __nccwpck_require__(96559) -}; - -/***/ }), - -/***/ 96881: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['amp'] = {}; -AWS.Amp = Service.defineService('amp', ['2020-08-01']); -Object.defineProperty(apiLoader.services['amp'], '2020-08-01', { - get: function get() { - var model = __nccwpck_require__(78362); - model.paginators = (__nccwpck_require__(75928)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(58239)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); +/***/ 41236: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -module.exports = AWS.Amp; +"use strict"; +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.uint32ArrayFrom = exports.numToUint8 = exports.isEmptyData = exports.convertToBuffer = void 0; +var convertToBuffer_1 = __nccwpck_require__(43228); +Object.defineProperty(exports, "convertToBuffer", ({ enumerable: true, get: function () { return convertToBuffer_1.convertToBuffer; } })); +var isEmptyData_1 = __nccwpck_require__(18275); +Object.defineProperty(exports, "isEmptyData", ({ enumerable: true, get: function () { return isEmptyData_1.isEmptyData; } })); +var numToUint8_1 = __nccwpck_require__(93775); +Object.defineProperty(exports, "numToUint8", ({ enumerable: true, get: function () { return numToUint8_1.numToUint8; } })); +var uint32ArrayFrom_1 = __nccwpck_require__(39404); +Object.defineProperty(exports, "uint32ArrayFrom", ({ enumerable: true, get: function () { return uint32ArrayFrom_1.uint32ArrayFrom; } })); +//# sourceMappingURL=index.js.map /***/ }), -/***/ 38090: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['amplify'] = {}; -AWS.Amplify = Service.defineService('amplify', ['2017-07-25']); -Object.defineProperty(apiLoader.services['amplify'], '2017-07-25', { - get: function get() { - var model = __nccwpck_require__(36813); - model.paginators = (__nccwpck_require__(53733)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +/***/ 18275: +/***/ ((__unused_webpack_module, exports) => { -module.exports = AWS.Amplify; +"use strict"; +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isEmptyData = void 0; +function isEmptyData(data) { + if (typeof data === "string") { + return data.length === 0; + } + return data.byteLength === 0; +} +exports.isEmptyData = isEmptyData; +//# sourceMappingURL=isEmptyData.js.map /***/ }), -/***/ 2806: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['amplifybackend'] = {}; -AWS.AmplifyBackend = Service.defineService('amplifybackend', ['2020-08-11']); -Object.defineProperty(apiLoader.services['amplifybackend'], '2020-08-11', { - get: function get() { - var model = __nccwpck_require__(23939); - model.paginators = (__nccwpck_require__(27232)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +/***/ 93775: +/***/ ((__unused_webpack_module, exports) => { -module.exports = AWS.AmplifyBackend; +"use strict"; +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.numToUint8 = void 0; +function numToUint8(num) { + return new Uint8Array([ + (num & 0xff000000) >> 24, + (num & 0x00ff0000) >> 16, + (num & 0x0000ff00) >> 8, + num & 0x000000ff, + ]); +} +exports.numToUint8 = numToUint8; +//# sourceMappingURL=numToUint8.js.map /***/ }), -/***/ 89937: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['amplifyuibuilder'] = {}; -AWS.AmplifyUIBuilder = Service.defineService('amplifyuibuilder', ['2021-08-11']); -Object.defineProperty(apiLoader.services['amplifyuibuilder'], '2021-08-11', { - get: function get() { - var model = __nccwpck_require__(48987); - model.paginators = (__nccwpck_require__(56072)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(70564)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); +/***/ 39404: +/***/ ((__unused_webpack_module, exports) => { -module.exports = AWS.AmplifyUIBuilder; +"use strict"; +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.uint32ArrayFrom = void 0; +// IE 11 does not support Array.from, so we do it manually +function uint32ArrayFrom(a_lookUpTable) { + if (!Uint32Array.from) { + var return_array = new Uint32Array(a_lookUpTable.length); + var a_index = 0; + while (a_index < a_lookUpTable.length) { + return_array[a_index] = a_lookUpTable[a_index]; + a_index += 1; + } + return return_array; + } + return Uint32Array.from(a_lookUpTable); +} +exports.uint32ArrayFrom = uint32ArrayFrom; +//# sourceMappingURL=uint32ArrayFrom.js.map /***/ }), -/***/ 91759: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 69838: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['apigateway'] = {}; -AWS.APIGateway = Service.defineService('apigateway', ['2015-07-09']); -__nccwpck_require__(4338); -Object.defineProperty(apiLoader.services['apigateway'], '2015-07-09', { - get: function get() { - var model = __nccwpck_require__(59463); - model.paginators = (__nccwpck_require__(25878)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.APIGateway; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.SSO = void 0; +const smithy_client_1 = __nccwpck_require__(63570); +const GetRoleCredentialsCommand_1 = __nccwpck_require__(18972); +const ListAccountRolesCommand_1 = __nccwpck_require__(1513); +const ListAccountsCommand_1 = __nccwpck_require__(64296); +const LogoutCommand_1 = __nccwpck_require__(12586); +const SSOClient_1 = __nccwpck_require__(71057); +const commands = { + GetRoleCredentialsCommand: GetRoleCredentialsCommand_1.GetRoleCredentialsCommand, + ListAccountRolesCommand: ListAccountRolesCommand_1.ListAccountRolesCommand, + ListAccountsCommand: ListAccountsCommand_1.ListAccountsCommand, + LogoutCommand: LogoutCommand_1.LogoutCommand, +}; +class SSO extends SSOClient_1.SSOClient { +} +exports.SSO = SSO; +(0, smithy_client_1.createAggregatedClient)(commands, SSO); /***/ }), -/***/ 31762: -/***/ 
((module, __unused_webpack_exports, __nccwpck_require__) => { - -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['apigatewaymanagementapi'] = {}; -AWS.ApiGatewayManagementApi = Service.defineService('apigatewaymanagementapi', ['2018-11-29']); -Object.defineProperty(apiLoader.services['apigatewaymanagementapi'], '2018-11-29', { - get: function get() { - var model = __nccwpck_require__(57832); - model.paginators = (__nccwpck_require__(2787)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); - -module.exports = AWS.ApiGatewayManagementApi; - +/***/ 71057: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -/***/ }), +"use strict"; -/***/ 44987: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.SSOClient = exports.__Client = void 0; +const middleware_host_header_1 = __nccwpck_require__(22545); +const middleware_logger_1 = __nccwpck_require__(20014); +const middleware_recursion_detection_1 = __nccwpck_require__(85525); +const middleware_user_agent_1 = __nccwpck_require__(64688); +const config_resolver_1 = __nccwpck_require__(53098); +const middleware_content_length_1 = __nccwpck_require__(82800); +const middleware_endpoint_1 = __nccwpck_require__(82918); +const middleware_retry_1 = __nccwpck_require__(96039); +const smithy_client_1 = __nccwpck_require__(63570); +Object.defineProperty(exports, "__Client", ({ enumerable: true, get: function () { return smithy_client_1.Client; } })); +const EndpointParameters_1 = __nccwpck_require__(34214); +const runtimeConfig_1 = __nccwpck_require__(19756); +class SSOClient extends smithy_client_1.Client { + constructor(...[configuration]) { + const _config_0 = (0, runtimeConfig_1.getRuntimeConfig)(configuration || {}); + const _config_1 = (0, EndpointParameters_1.resolveClientEndpointParameters)(_config_0); + const _config_2 = (0, config_resolver_1.resolveRegionConfig)(_config_1); + const _config_3 = (0, middleware_endpoint_1.resolveEndpointConfig)(_config_2); + const _config_4 = (0, middleware_retry_1.resolveRetryConfig)(_config_3); + const _config_5 = (0, middleware_host_header_1.resolveHostHeaderConfig)(_config_4); + const _config_6 = (0, middleware_user_agent_1.resolveUserAgentConfig)(_config_5); + super(_config_6); + this.config = _config_6; + this.middlewareStack.use((0, middleware_retry_1.getRetryPlugin)(this.config)); + this.middlewareStack.use((0, middleware_content_length_1.getContentLengthPlugin)(this.config)); + this.middlewareStack.use((0, middleware_host_header_1.getHostHeaderPlugin)(this.config)); + this.middlewareStack.use((0, middleware_logger_1.getLoggerPlugin)(this.config)); + this.middlewareStack.use((0, middleware_recursion_detection_1.getRecursionDetectionPlugin)(this.config)); + this.middlewareStack.use((0, middleware_user_agent_1.getUserAgentPlugin)(this.config)); + } + destroy() { + super.destroy(); + } +} +exports.SSOClient = SSOClient; + + +/***/ }), + +/***/ 18972: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['apigatewayv2'] = {}; -AWS.ApiGatewayV2 = Service.defineService('apigatewayv2', ['2018-11-29']); -Object.defineProperty(apiLoader.services['apigatewayv2'], '2018-11-29', { - get: function get() { - var model = 
__nccwpck_require__(59326); - model.paginators = (__nccwpck_require__(90171)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.ApiGatewayV2; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GetRoleCredentialsCommand = exports.$Command = void 0; +const middleware_endpoint_1 = __nccwpck_require__(82918); +const middleware_serde_1 = __nccwpck_require__(81238); +const smithy_client_1 = __nccwpck_require__(63570); +Object.defineProperty(exports, "$Command", ({ enumerable: true, get: function () { return smithy_client_1.Command; } })); +const models_0_1 = __nccwpck_require__(66390); +const Aws_restJson1_1 = __nccwpck_require__(98507); +class GetRoleCredentialsCommand extends smithy_client_1.Command { + static getEndpointParameterInstructions() { + return { + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" }, + }; + } + constructor(input) { + super(); + this.input = input; + } + resolveMiddleware(clientStack, configuration, options) { + this.middlewareStack.use((0, middleware_serde_1.getSerdePlugin)(configuration, this.serialize, this.deserialize)); + this.middlewareStack.use((0, middleware_endpoint_1.getEndpointPlugin)(configuration, GetRoleCredentialsCommand.getEndpointParameterInstructions())); + const stack = clientStack.concat(this.middlewareStack); + const { logger } = configuration; + const clientName = "SSOClient"; + const commandName = "GetRoleCredentialsCommand"; + const handlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: models_0_1.GetRoleCredentialsRequestFilterSensitiveLog, + outputFilterSensitiveLog: models_0_1.GetRoleCredentialsResponseFilterSensitiveLog, + }; + const { requestHandler } = configuration; + return stack.resolve((request) => requestHandler.handle(request.request, options || {}), handlerExecutionContext); + } + serialize(input, context) { + return (0, Aws_restJson1_1.se_GetRoleCredentialsCommand)(input, context); + } + deserialize(output, context) { + return (0, Aws_restJson1_1.de_GetRoleCredentialsCommand)(output, context); + } +} +exports.GetRoleCredentialsCommand = GetRoleCredentialsCommand; /***/ }), -/***/ 78606: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 1513: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['appconfig'] = {}; -AWS.AppConfig = Service.defineService('appconfig', ['2019-10-09']); -Object.defineProperty(apiLoader.services['appconfig'], '2019-10-09', { - get: function get() { - var model = __nccwpck_require__(44701); - model.paginators = (__nccwpck_require__(41789)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.AppConfig; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ListAccountRolesCommand = exports.$Command = void 0; +const middleware_endpoint_1 = __nccwpck_require__(82918); +const middleware_serde_1 = __nccwpck_require__(81238); +const smithy_client_1 = __nccwpck_require__(63570); +Object.defineProperty(exports, "$Command", ({ enumerable: true, get: function () { return smithy_client_1.Command; } })); +const 
models_0_1 = __nccwpck_require__(66390); +const Aws_restJson1_1 = __nccwpck_require__(98507); +class ListAccountRolesCommand extends smithy_client_1.Command { + static getEndpointParameterInstructions() { + return { + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" }, + }; + } + constructor(input) { + super(); + this.input = input; + } + resolveMiddleware(clientStack, configuration, options) { + this.middlewareStack.use((0, middleware_serde_1.getSerdePlugin)(configuration, this.serialize, this.deserialize)); + this.middlewareStack.use((0, middleware_endpoint_1.getEndpointPlugin)(configuration, ListAccountRolesCommand.getEndpointParameterInstructions())); + const stack = clientStack.concat(this.middlewareStack); + const { logger } = configuration; + const clientName = "SSOClient"; + const commandName = "ListAccountRolesCommand"; + const handlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: models_0_1.ListAccountRolesRequestFilterSensitiveLog, + outputFilterSensitiveLog: (_) => _, + }; + const { requestHandler } = configuration; + return stack.resolve((request) => requestHandler.handle(request.request, options || {}), handlerExecutionContext); + } + serialize(input, context) { + return (0, Aws_restJson1_1.se_ListAccountRolesCommand)(input, context); + } + deserialize(output, context) { + return (0, Aws_restJson1_1.de_ListAccountRolesCommand)(output, context); + } +} +exports.ListAccountRolesCommand = ListAccountRolesCommand; /***/ }), -/***/ 45282: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 64296: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['appconfigdata'] = {}; -AWS.AppConfigData = Service.defineService('appconfigdata', ['2021-11-11']); -Object.defineProperty(apiLoader.services['appconfigdata'], '2021-11-11', { - get: function get() { - var model = __nccwpck_require__(86796); - model.paginators = (__nccwpck_require__(48010)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.AppConfigData; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ListAccountsCommand = exports.$Command = void 0; +const middleware_endpoint_1 = __nccwpck_require__(82918); +const middleware_serde_1 = __nccwpck_require__(81238); +const smithy_client_1 = __nccwpck_require__(63570); +Object.defineProperty(exports, "$Command", ({ enumerable: true, get: function () { return smithy_client_1.Command; } })); +const models_0_1 = __nccwpck_require__(66390); +const Aws_restJson1_1 = __nccwpck_require__(98507); +class ListAccountsCommand extends smithy_client_1.Command { + static getEndpointParameterInstructions() { + return { + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" }, + }; + } + constructor(input) { + super(); + this.input = input; + } + resolveMiddleware(clientStack, configuration, options) { + this.middlewareStack.use((0, middleware_serde_1.getSerdePlugin)(configuration, this.serialize, 
this.deserialize)); + this.middlewareStack.use((0, middleware_endpoint_1.getEndpointPlugin)(configuration, ListAccountsCommand.getEndpointParameterInstructions())); + const stack = clientStack.concat(this.middlewareStack); + const { logger } = configuration; + const clientName = "SSOClient"; + const commandName = "ListAccountsCommand"; + const handlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: models_0_1.ListAccountsRequestFilterSensitiveLog, + outputFilterSensitiveLog: (_) => _, + }; + const { requestHandler } = configuration; + return stack.resolve((request) => requestHandler.handle(request.request, options || {}), handlerExecutionContext); + } + serialize(input, context) { + return (0, Aws_restJson1_1.se_ListAccountsCommand)(input, context); + } + deserialize(output, context) { + return (0, Aws_restJson1_1.de_ListAccountsCommand)(output, context); + } +} +exports.ListAccountsCommand = ListAccountsCommand; /***/ }), -/***/ 60844: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 12586: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['appflow'] = {}; -AWS.Appflow = Service.defineService('appflow', ['2020-08-23']); -Object.defineProperty(apiLoader.services['appflow'], '2020-08-23', { - get: function get() { - var model = __nccwpck_require__(32840); - model.paginators = (__nccwpck_require__(16916)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Appflow; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.LogoutCommand = exports.$Command = void 0; +const middleware_endpoint_1 = __nccwpck_require__(82918); +const middleware_serde_1 = __nccwpck_require__(81238); +const smithy_client_1 = __nccwpck_require__(63570); +Object.defineProperty(exports, "$Command", ({ enumerable: true, get: function () { return smithy_client_1.Command; } })); +const models_0_1 = __nccwpck_require__(66390); +const Aws_restJson1_1 = __nccwpck_require__(98507); +class LogoutCommand extends smithy_client_1.Command { + static getEndpointParameterInstructions() { + return { + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" }, + }; + } + constructor(input) { + super(); + this.input = input; + } + resolveMiddleware(clientStack, configuration, options) { + this.middlewareStack.use((0, middleware_serde_1.getSerdePlugin)(configuration, this.serialize, this.deserialize)); + this.middlewareStack.use((0, middleware_endpoint_1.getEndpointPlugin)(configuration, LogoutCommand.getEndpointParameterInstructions())); + const stack = clientStack.concat(this.middlewareStack); + const { logger } = configuration; + const clientName = "SSOClient"; + const commandName = "LogoutCommand"; + const handlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: models_0_1.LogoutRequestFilterSensitiveLog, + outputFilterSensitiveLog: (_) => _, + }; + const { requestHandler } = configuration; + return stack.resolve((request) => requestHandler.handle(request.request, options || {}), handlerExecutionContext); + } + serialize(input, context) { + return (0, Aws_restJson1_1.se_LogoutCommand)(input, 
context); + } + deserialize(output, context) { + return (0, Aws_restJson1_1.de_LogoutCommand)(output, context); + } +} +exports.LogoutCommand = LogoutCommand; /***/ }), -/***/ 85479: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 65706: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['appintegrations'] = {}; -AWS.AppIntegrations = Service.defineService('appintegrations', ['2020-07-29']); -Object.defineProperty(apiLoader.services['appintegrations'], '2020-07-29', { - get: function get() { - var model = __nccwpck_require__(62033); - model.paginators = (__nccwpck_require__(61866)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.AppIntegrations; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(18972), exports); +tslib_1.__exportStar(__nccwpck_require__(1513), exports); +tslib_1.__exportStar(__nccwpck_require__(64296), exports); +tslib_1.__exportStar(__nccwpck_require__(12586), exports); /***/ }), -/***/ 25598: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 34214: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['applicationautoscaling'] = {}; -AWS.ApplicationAutoScaling = Service.defineService('applicationautoscaling', ['2016-02-06']); -Object.defineProperty(apiLoader.services['applicationautoscaling'], '2016-02-06', { - get: function get() { - var model = __nccwpck_require__(47320); - model.paginators = (__nccwpck_require__(40322)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.ApplicationAutoScaling; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveClientEndpointParameters = void 0; +const resolveClientEndpointParameters = (options) => { + return { + ...options, + useDualstackEndpoint: options.useDualstackEndpoint ?? false, + useFipsEndpoint: options.useFipsEndpoint ?? 
false, + defaultSigningName: "awsssoportal", + }; +}; +exports.resolveClientEndpointParameters = resolveClientEndpointParameters; /***/ }), -/***/ 20887: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 30898: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['applicationcostprofiler'] = {}; -AWS.ApplicationCostProfiler = Service.defineService('applicationcostprofiler', ['2020-09-10']); -Object.defineProperty(apiLoader.services['applicationcostprofiler'], '2020-09-10', { - get: function get() { - var model = __nccwpck_require__(96818); - model.paginators = (__nccwpck_require__(41331)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.ApplicationCostProfiler; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.defaultEndpointResolver = void 0; +const util_endpoints_1 = __nccwpck_require__(13350); +const ruleset_1 = __nccwpck_require__(13341); +const defaultEndpointResolver = (endpointParams, context = {}) => { + return (0, util_endpoints_1.resolveEndpoint)(ruleset_1.ruleSet, { + endpointParams: endpointParams, + logger: context.logger, + }); +}; +exports.defaultEndpointResolver = defaultEndpointResolver; /***/ }), -/***/ 83972: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 13341: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['applicationinsights'] = {}; -AWS.ApplicationInsights = Service.defineService('applicationinsights', ['2018-11-25']); -Object.defineProperty(apiLoader.services['applicationinsights'], '2018-11-25', { - get: function get() { - var model = __nccwpck_require__(96143); - model.paginators = (__nccwpck_require__(22242)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.ApplicationInsights; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ruleSet = void 0; +const p = "required", q = "fn", r = "argv", s = "ref"; +const a = "PartitionResult", b = "tree", c = "error", d = "endpoint", e = { [p]: false, "type": "String" }, f = { [p]: true, "default": false, "type": "Boolean" }, g = { [s]: "Endpoint" }, h = { [q]: "booleanEquals", [r]: [{ [s]: "UseFIPS" }, true] }, i = { [q]: "booleanEquals", [r]: [{ [s]: "UseDualStack" }, true] }, j = {}, k = { [q]: "booleanEquals", [r]: [true, { [q]: "getAttr", [r]: [{ [s]: a }, "supportsFIPS"] }] }, l = { [q]: "booleanEquals", [r]: [true, { [q]: "getAttr", [r]: [{ [s]: a }, "supportsDualStack"] }] }, m = [g], n = [h], o = [i]; +const _data = { version: "1.0", parameters: { Region: e, UseDualStack: f, UseFIPS: f, Endpoint: e }, rules: [{ conditions: [{ [q]: "aws.partition", [r]: [{ [s]: "Region" }], assign: a }], type: b, rules: [{ conditions: [{ [q]: "isSet", [r]: m }, { [q]: "parseURL", [r]: m, assign: "url" }], type: b, rules: [{ conditions: n, error: "Invalid Configuration: FIPS and custom endpoint are not supported", type: c }, { type: b, rules: [{ conditions: o, error: "Invalid Configuration: Dualstack and custom endpoint are not supported", type: c }, { endpoint: { url: g, properties: j, headers: j }, type: d }] }] }, { conditions: [h, i], type: b, rules: [{ conditions: [k, 
l], type: b, rules: [{ endpoint: { url: "https://portal.sso-fips.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: j, headers: j }, type: d }] }, { error: "FIPS and DualStack are enabled, but this partition does not support one or both", type: c }] }, { conditions: n, type: b, rules: [{ conditions: [k], type: b, rules: [{ type: b, rules: [{ endpoint: { url: "https://portal.sso-fips.{Region}.{PartitionResult#dnsSuffix}", properties: j, headers: j }, type: d }] }] }, { error: "FIPS is enabled but this partition does not support FIPS", type: c }] }, { conditions: o, type: b, rules: [{ conditions: [l], type: b, rules: [{ endpoint: { url: "https://portal.sso.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: j, headers: j }, type: d }] }, { error: "DualStack is enabled but this partition does not support DualStack", type: c }] }, { endpoint: { url: "https://portal.sso.{Region}.{PartitionResult#dnsSuffix}", properties: j, headers: j }, type: d }] }] }; +exports.ruleSet = _data; /***/ }), -/***/ 69226: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 82666: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['appmesh'] = {}; -AWS.AppMesh = Service.defineService('appmesh', ['2018-10-01', '2018-10-01*', '2019-01-25']); -Object.defineProperty(apiLoader.services['appmesh'], '2018-10-01', { - get: function get() { - var model = __nccwpck_require__(64780); - model.paginators = (__nccwpck_require__(54936)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); -Object.defineProperty(apiLoader.services['appmesh'], '2019-01-25', { - get: function get() { - var model = __nccwpck_require__(78066); - model.paginators = (__nccwpck_require__(37698)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.AppMesh; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.SSOServiceException = void 0; +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(71057), exports); +tslib_1.__exportStar(__nccwpck_require__(69838), exports); +tslib_1.__exportStar(__nccwpck_require__(65706), exports); +tslib_1.__exportStar(__nccwpck_require__(36773), exports); +tslib_1.__exportStar(__nccwpck_require__(14952), exports); +var SSOServiceException_1 = __nccwpck_require__(81517); +Object.defineProperty(exports, "SSOServiceException", ({ enumerable: true, get: function () { return SSOServiceException_1.SSOServiceException; } })); /***/ }), -/***/ 75589: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 81517: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['apprunner'] = {}; -AWS.AppRunner = Service.defineService('apprunner', ['2020-05-15']); -Object.defineProperty(apiLoader.services['apprunner'], '2020-05-15', { - get: function get() { - var model = __nccwpck_require__(30036); - model.paginators = (__nccwpck_require__(50293)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.AppRunner; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.SSOServiceException = 
exports.__ServiceException = void 0; +const smithy_client_1 = __nccwpck_require__(63570); +Object.defineProperty(exports, "__ServiceException", ({ enumerable: true, get: function () { return smithy_client_1.ServiceException; } })); +class SSOServiceException extends smithy_client_1.ServiceException { + constructor(options) { + super(options); + Object.setPrototypeOf(this, SSOServiceException.prototype); + } +} +exports.SSOServiceException = SSOServiceException; /***/ }), -/***/ 21730: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 14952: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['appstream'] = {}; -AWS.AppStream = Service.defineService('appstream', ['2016-12-01']); -Object.defineProperty(apiLoader.services['appstream'], '2016-12-01', { - get: function get() { - var model = __nccwpck_require__(85538); - model.paginators = (__nccwpck_require__(32191)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(21134)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.AppStream; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(66390), exports); /***/ }), -/***/ 12402: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['appsync'] = {}; -AWS.AppSync = Service.defineService('appsync', ['2017-07-25']); -Object.defineProperty(apiLoader.services['appsync'], '2017-07-25', { - get: function get() { - var model = __nccwpck_require__(94937); - model.paginators = (__nccwpck_require__(50233)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); - -module.exports = AWS.AppSync; - - -/***/ }), +/***/ 66390: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -/***/ 54280: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +"use strict"; -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['arczonalshift'] = {}; -AWS.ARCZonalShift = Service.defineService('arczonalshift', ['2022-10-30']); -Object.defineProperty(apiLoader.services['arczonalshift'], '2022-10-30', { - get: function get() { - var model = __nccwpck_require__(52286); - model.paginators = (__nccwpck_require__(70002)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.LogoutRequestFilterSensitiveLog = exports.ListAccountsRequestFilterSensitiveLog = exports.ListAccountRolesRequestFilterSensitiveLog = exports.GetRoleCredentialsResponseFilterSensitiveLog = exports.RoleCredentialsFilterSensitiveLog = exports.GetRoleCredentialsRequestFilterSensitiveLog = exports.UnauthorizedException = exports.TooManyRequestsException = exports.ResourceNotFoundException = exports.InvalidRequestException = void 0; +const smithy_client_1 = __nccwpck_require__(63570); +const SSOServiceException_1 = __nccwpck_require__(81517); +class InvalidRequestException extends SSOServiceException_1.SSOServiceException { + constructor(opts) { + super({ + name: 
"InvalidRequestException", + $fault: "client", + ...opts, + }); + this.name = "InvalidRequestException"; + this.$fault = "client"; + Object.setPrototypeOf(this, InvalidRequestException.prototype); + } +} +exports.InvalidRequestException = InvalidRequestException; +class ResourceNotFoundException extends SSOServiceException_1.SSOServiceException { + constructor(opts) { + super({ + name: "ResourceNotFoundException", + $fault: "client", + ...opts, + }); + this.name = "ResourceNotFoundException"; + this.$fault = "client"; + Object.setPrototypeOf(this, ResourceNotFoundException.prototype); + } +} +exports.ResourceNotFoundException = ResourceNotFoundException; +class TooManyRequestsException extends SSOServiceException_1.SSOServiceException { + constructor(opts) { + super({ + name: "TooManyRequestsException", + $fault: "client", + ...opts, + }); + this.name = "TooManyRequestsException"; + this.$fault = "client"; + Object.setPrototypeOf(this, TooManyRequestsException.prototype); + } +} +exports.TooManyRequestsException = TooManyRequestsException; +class UnauthorizedException extends SSOServiceException_1.SSOServiceException { + constructor(opts) { + super({ + name: "UnauthorizedException", + $fault: "client", + ...opts, + }); + this.name = "UnauthorizedException"; + this.$fault = "client"; + Object.setPrototypeOf(this, UnauthorizedException.prototype); + } +} +exports.UnauthorizedException = UnauthorizedException; +const GetRoleCredentialsRequestFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.accessToken && { accessToken: smithy_client_1.SENSITIVE_STRING }), }); - -module.exports = AWS.ARCZonalShift; +exports.GetRoleCredentialsRequestFilterSensitiveLog = GetRoleCredentialsRequestFilterSensitiveLog; +const RoleCredentialsFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.secretAccessKey && { secretAccessKey: smithy_client_1.SENSITIVE_STRING }), + ...(obj.sessionToken && { sessionToken: smithy_client_1.SENSITIVE_STRING }), +}); +exports.RoleCredentialsFilterSensitiveLog = RoleCredentialsFilterSensitiveLog; +const GetRoleCredentialsResponseFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.roleCredentials && { roleCredentials: (0, exports.RoleCredentialsFilterSensitiveLog)(obj.roleCredentials) }), +}); +exports.GetRoleCredentialsResponseFilterSensitiveLog = GetRoleCredentialsResponseFilterSensitiveLog; +const ListAccountRolesRequestFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.accessToken && { accessToken: smithy_client_1.SENSITIVE_STRING }), +}); +exports.ListAccountRolesRequestFilterSensitiveLog = ListAccountRolesRequestFilterSensitiveLog; +const ListAccountsRequestFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.accessToken && { accessToken: smithy_client_1.SENSITIVE_STRING }), +}); +exports.ListAccountsRequestFilterSensitiveLog = ListAccountsRequestFilterSensitiveLog; +const LogoutRequestFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.accessToken && { accessToken: smithy_client_1.SENSITIVE_STRING }), +}); +exports.LogoutRequestFilterSensitiveLog = LogoutRequestFilterSensitiveLog; /***/ }), -/***/ 29434: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 80849: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['athena'] = {}; -AWS.Athena = Service.defineService('athena', ['2017-05-18']); -Object.defineProperty(apiLoader.services['athena'], '2017-05-18', { - get: function get() { - var model = 
__nccwpck_require__(28680); - model.paginators = (__nccwpck_require__(44417)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Athena; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 20472: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 88460: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['auditmanager'] = {}; -AWS.AuditManager = Service.defineService('auditmanager', ['2017-07-25']); -Object.defineProperty(apiLoader.services['auditmanager'], '2017-07-25', { - get: function get() { - var model = __nccwpck_require__(41672); - model.paginators = (__nccwpck_require__(41321)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.AuditManager; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.paginateListAccountRoles = void 0; +const ListAccountRolesCommand_1 = __nccwpck_require__(1513); +const SSOClient_1 = __nccwpck_require__(71057); +const makePagedClientRequest = async (client, input, ...args) => { + return await client.send(new ListAccountRolesCommand_1.ListAccountRolesCommand(input), ...args); +}; +async function* paginateListAccountRoles(config, input, ...additionalArguments) { + let token = config.startingToken || undefined; + let hasNext = true; + let page; + while (hasNext) { + input.nextToken = token; + input["maxResults"] = config.pageSize; + if (config.client instanceof SSOClient_1.SSOClient) { + page = await makePagedClientRequest(config.client, input, ...additionalArguments); + } + else { + throw new Error("Invalid client, expected SSO | SSOClient"); + } + yield page; + const prevToken = token; + token = page.nextToken; + hasNext = !!(token && (!config.stopOnSameToken || token !== prevToken)); + } + return undefined; +} +exports.paginateListAccountRoles = paginateListAccountRoles; /***/ }), -/***/ 33960: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 50938: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['augmentedairuntime'] = {}; -AWS.AugmentedAIRuntime = Service.defineService('augmentedairuntime', ['2019-11-07']); -Object.defineProperty(apiLoader.services['augmentedairuntime'], '2019-11-07', { - get: function get() { - var model = __nccwpck_require__(57704); - model.paginators = (__nccwpck_require__(13201)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.AugmentedAIRuntime; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.paginateListAccounts = void 0; +const ListAccountsCommand_1 = __nccwpck_require__(64296); +const SSOClient_1 = __nccwpck_require__(71057); +const makePagedClientRequest = async (client, input, ...args) => { + return await client.send(new ListAccountsCommand_1.ListAccountsCommand(input), ...args); +}; +async function* paginateListAccounts(config, input, ...additionalArguments) { + let token = config.startingToken || undefined; + let hasNext = true; + let page; + while (hasNext) { + input.nextToken = token; + input["maxResults"] = config.pageSize; + if 
(config.client instanceof SSOClient_1.SSOClient) { + page = await makePagedClientRequest(config.client, input, ...additionalArguments); + } + else { + throw new Error("Invalid client, expected SSO | SSOClient"); + } + yield page; + const prevToken = token; + token = page.nextToken; + hasNext = !!(token && (!config.stopOnSameToken || token !== prevToken)); + } + return undefined; +} +exports.paginateListAccounts = paginateListAccounts; /***/ }), -/***/ 31652: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 36773: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['autoscaling'] = {}; -AWS.AutoScaling = Service.defineService('autoscaling', ['2011-01-01']); -Object.defineProperty(apiLoader.services['autoscaling'], '2011-01-01', { - get: function get() { - var model = __nccwpck_require__(55394); - model.paginators = (__nccwpck_require__(81436)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.AutoScaling; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(80849), exports); +tslib_1.__exportStar(__nccwpck_require__(88460), exports); +tslib_1.__exportStar(__nccwpck_require__(50938), exports); /***/ }), -/***/ 2554: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 98507: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['autoscalingplans'] = {}; -AWS.AutoScalingPlans = Service.defineService('autoscalingplans', ['2018-01-06']); -Object.defineProperty(apiLoader.services['autoscalingplans'], '2018-01-06', { - get: function get() { - var model = __nccwpck_require__(53216); - model.paginators = (__nccwpck_require__(64985)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); - -module.exports = AWS.AutoScalingPlans; - - -/***/ }), - -/***/ 82455: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +"use strict"; -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['backup'] = {}; -AWS.Backup = Service.defineService('backup', ['2018-11-15']); -Object.defineProperty(apiLoader.services['backup'], '2018-11-15', { - get: function get() { - var model = __nccwpck_require__(77990); - model.paginators = (__nccwpck_require__(54869)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.de_LogoutCommand = exports.de_ListAccountsCommand = exports.de_ListAccountRolesCommand = exports.de_GetRoleCredentialsCommand = exports.se_LogoutCommand = exports.se_ListAccountsCommand = exports.se_ListAccountRolesCommand = exports.se_GetRoleCredentialsCommand = void 0; +const protocol_http_1 = __nccwpck_require__(64418); +const smithy_client_1 = __nccwpck_require__(63570); +const models_0_1 = __nccwpck_require__(66390); +const SSOServiceException_1 = __nccwpck_require__(81517); +const se_GetRoleCredentialsCommand = async (input, context) => { + const { hostname, protocol = "https", port, path: basePath } = await 
context.endpoint(); + const headers = (0, smithy_client_1.map)({}, isSerializableHeaderValue, { + "x-amz-sso_bearer_token": input.accessToken, + }); + const resolvedPath = `${basePath?.endsWith("/") ? basePath.slice(0, -1) : basePath || ""}` + "/federation/credentials"; + const query = (0, smithy_client_1.map)({ + role_name: [, (0, smithy_client_1.expectNonNull)(input.roleName, `roleName`)], + account_id: [, (0, smithy_client_1.expectNonNull)(input.accountId, `accountId`)], + }); + let body; + return new protocol_http_1.HttpRequest({ + protocol, + hostname, + port, + method: "GET", + headers, + path: resolvedPath, + query, + body, + }); +}; +exports.se_GetRoleCredentialsCommand = se_GetRoleCredentialsCommand; +const se_ListAccountRolesCommand = async (input, context) => { + const { hostname, protocol = "https", port, path: basePath } = await context.endpoint(); + const headers = (0, smithy_client_1.map)({}, isSerializableHeaderValue, { + "x-amz-sso_bearer_token": input.accessToken, + }); + const resolvedPath = `${basePath?.endsWith("/") ? basePath.slice(0, -1) : basePath || ""}` + "/assignment/roles"; + const query = (0, smithy_client_1.map)({ + next_token: [, input.nextToken], + max_result: [() => input.maxResults !== void 0, () => input.maxResults.toString()], + account_id: [, (0, smithy_client_1.expectNonNull)(input.accountId, `accountId`)], + }); + let body; + return new protocol_http_1.HttpRequest({ + protocol, + hostname, + port, + method: "GET", + headers, + path: resolvedPath, + query, + body, + }); +}; +exports.se_ListAccountRolesCommand = se_ListAccountRolesCommand; +const se_ListAccountsCommand = async (input, context) => { + const { hostname, protocol = "https", port, path: basePath } = await context.endpoint(); + const headers = (0, smithy_client_1.map)({}, isSerializableHeaderValue, { + "x-amz-sso_bearer_token": input.accessToken, + }); + const resolvedPath = `${basePath?.endsWith("/") ? basePath.slice(0, -1) : basePath || ""}` + "/assignment/accounts"; + const query = (0, smithy_client_1.map)({ + next_token: [, input.nextToken], + max_result: [() => input.maxResults !== void 0, () => input.maxResults.toString()], + }); + let body; + return new protocol_http_1.HttpRequest({ + protocol, + hostname, + port, + method: "GET", + headers, + path: resolvedPath, + query, + body, + }); +}; +exports.se_ListAccountsCommand = se_ListAccountsCommand; +const se_LogoutCommand = async (input, context) => { + const { hostname, protocol = "https", port, path: basePath } = await context.endpoint(); + const headers = (0, smithy_client_1.map)({}, isSerializableHeaderValue, { + "x-amz-sso_bearer_token": input.accessToken, + }); + const resolvedPath = `${basePath?.endsWith("/") ? 
basePath.slice(0, -1) : basePath || ""}` + "/logout"; + let body; + return new protocol_http_1.HttpRequest({ + protocol, + hostname, + port, + method: "POST", + headers, + path: resolvedPath, + body, + }); +}; +exports.se_LogoutCommand = se_LogoutCommand; +const de_GetRoleCredentialsCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_GetRoleCredentialsCommandError(output, context); + } + const contents = (0, smithy_client_1.map)({ + $metadata: deserializeMetadata(output), + }); + const data = (0, smithy_client_1.expectNonNull)((0, smithy_client_1.expectObject)(await parseBody(output.body, context)), "body"); + const doc = (0, smithy_client_1.take)(data, { + roleCredentials: smithy_client_1._json, + }); + Object.assign(contents, doc); + return contents; +}; +exports.de_GetRoleCredentialsCommand = de_GetRoleCredentialsCommand; +const de_GetRoleCredentialsCommandError = async (output, context) => { + const parsedOutput = { + ...output, + body: await parseErrorBody(output.body, context), + }; + const errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "InvalidRequestException": + case "com.amazonaws.sso#InvalidRequestException": + throw await de_InvalidRequestExceptionRes(parsedOutput, context); + case "ResourceNotFoundException": + case "com.amazonaws.sso#ResourceNotFoundException": + throw await de_ResourceNotFoundExceptionRes(parsedOutput, context); + case "TooManyRequestsException": + case "com.amazonaws.sso#TooManyRequestsException": + throw await de_TooManyRequestsExceptionRes(parsedOutput, context); + case "UnauthorizedException": + case "com.amazonaws.sso#UnauthorizedException": + throw await de_UnauthorizedExceptionRes(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody, + errorCode, + }); + } +}; +const de_ListAccountRolesCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_ListAccountRolesCommandError(output, context); + } + const contents = (0, smithy_client_1.map)({ + $metadata: deserializeMetadata(output), + }); + const data = (0, smithy_client_1.expectNonNull)((0, smithy_client_1.expectObject)(await parseBody(output.body, context)), "body"); + const doc = (0, smithy_client_1.take)(data, { + nextToken: smithy_client_1.expectString, + roleList: smithy_client_1._json, + }); + Object.assign(contents, doc); + return contents; +}; +exports.de_ListAccountRolesCommand = de_ListAccountRolesCommand; +const de_ListAccountRolesCommandError = async (output, context) => { + const parsedOutput = { + ...output, + body: await parseErrorBody(output.body, context), + }; + const errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "InvalidRequestException": + case "com.amazonaws.sso#InvalidRequestException": + throw await de_InvalidRequestExceptionRes(parsedOutput, context); + case "ResourceNotFoundException": + case "com.amazonaws.sso#ResourceNotFoundException": + throw await de_ResourceNotFoundExceptionRes(parsedOutput, context); + case "TooManyRequestsException": + case "com.amazonaws.sso#TooManyRequestsException": + throw await de_TooManyRequestsExceptionRes(parsedOutput, context); + case "UnauthorizedException": + case "com.amazonaws.sso#UnauthorizedException": + throw await de_UnauthorizedExceptionRes(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + 
parsedBody, + errorCode, + }); + } +}; +const de_ListAccountsCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_ListAccountsCommandError(output, context); + } + const contents = (0, smithy_client_1.map)({ + $metadata: deserializeMetadata(output), + }); + const data = (0, smithy_client_1.expectNonNull)((0, smithy_client_1.expectObject)(await parseBody(output.body, context)), "body"); + const doc = (0, smithy_client_1.take)(data, { + accountList: smithy_client_1._json, + nextToken: smithy_client_1.expectString, + }); + Object.assign(contents, doc); + return contents; +}; +exports.de_ListAccountsCommand = de_ListAccountsCommand; +const de_ListAccountsCommandError = async (output, context) => { + const parsedOutput = { + ...output, + body: await parseErrorBody(output.body, context), + }; + const errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "InvalidRequestException": + case "com.amazonaws.sso#InvalidRequestException": + throw await de_InvalidRequestExceptionRes(parsedOutput, context); + case "ResourceNotFoundException": + case "com.amazonaws.sso#ResourceNotFoundException": + throw await de_ResourceNotFoundExceptionRes(parsedOutput, context); + case "TooManyRequestsException": + case "com.amazonaws.sso#TooManyRequestsException": + throw await de_TooManyRequestsExceptionRes(parsedOutput, context); + case "UnauthorizedException": + case "com.amazonaws.sso#UnauthorizedException": + throw await de_UnauthorizedExceptionRes(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody, + errorCode, + }); + } +}; +const de_LogoutCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_LogoutCommandError(output, context); + } + const contents = (0, smithy_client_1.map)({ + $metadata: deserializeMetadata(output), + }); + await (0, smithy_client_1.collectBody)(output.body, context); + return contents; +}; +exports.de_LogoutCommand = de_LogoutCommand; +const de_LogoutCommandError = async (output, context) => { + const parsedOutput = { + ...output, + body: await parseErrorBody(output.body, context), + }; + const errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "InvalidRequestException": + case "com.amazonaws.sso#InvalidRequestException": + throw await de_InvalidRequestExceptionRes(parsedOutput, context); + case "TooManyRequestsException": + case "com.amazonaws.sso#TooManyRequestsException": + throw await de_TooManyRequestsExceptionRes(parsedOutput, context); + case "UnauthorizedException": + case "com.amazonaws.sso#UnauthorizedException": + throw await de_UnauthorizedExceptionRes(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody, + errorCode, + }); + } +}; +const throwDefaultError = (0, smithy_client_1.withBaseException)(SSOServiceException_1.SSOServiceException); +const de_InvalidRequestExceptionRes = async (parsedOutput, context) => { + const contents = (0, smithy_client_1.map)({}); + const data = parsedOutput.body; + const doc = (0, smithy_client_1.take)(data, { + message: smithy_client_1.expectString, + }); + Object.assign(contents, doc); + const exception = new models_0_1.InvalidRequestException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return (0, smithy_client_1.decorateServiceException)(exception, parsedOutput.body); +}; 
+const de_ResourceNotFoundExceptionRes = async (parsedOutput, context) => { + const contents = (0, smithy_client_1.map)({}); + const data = parsedOutput.body; + const doc = (0, smithy_client_1.take)(data, { + message: smithy_client_1.expectString, + }); + Object.assign(contents, doc); + const exception = new models_0_1.ResourceNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return (0, smithy_client_1.decorateServiceException)(exception, parsedOutput.body); +}; +const de_TooManyRequestsExceptionRes = async (parsedOutput, context) => { + const contents = (0, smithy_client_1.map)({}); + const data = parsedOutput.body; + const doc = (0, smithy_client_1.take)(data, { + message: smithy_client_1.expectString, + }); + Object.assign(contents, doc); + const exception = new models_0_1.TooManyRequestsException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return (0, smithy_client_1.decorateServiceException)(exception, parsedOutput.body); +}; +const de_UnauthorizedExceptionRes = async (parsedOutput, context) => { + const contents = (0, smithy_client_1.map)({}); + const data = parsedOutput.body; + const doc = (0, smithy_client_1.take)(data, { + message: smithy_client_1.expectString, + }); + Object.assign(contents, doc); + const exception = new models_0_1.UnauthorizedException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return (0, smithy_client_1.decorateServiceException)(exception, parsedOutput.body); +}; +const deserializeMetadata = (output) => ({ + httpStatusCode: output.statusCode, + requestId: output.headers["x-amzn-requestid"] ?? output.headers["x-amzn-request-id"] ?? output.headers["x-amz-request-id"], + extendedRequestId: output.headers["x-amz-id-2"], + cfId: output.headers["x-amz-cf-id"], +}); +const collectBodyString = (streamBody, context) => (0, smithy_client_1.collectBody)(streamBody, context).then((body) => context.utf8Encoder(body)); +const isSerializableHeaderValue = (value) => value !== undefined && + value !== null && + value !== "" && + (!Object.getOwnPropertyNames(value).includes("length") || value.length != 0) && + (!Object.getOwnPropertyNames(value).includes("size") || value.size != 0); +const parseBody = (streamBody, context) => collectBodyString(streamBody, context).then((encoded) => { + if (encoded.length) { + return JSON.parse(encoded); + } + return {}; }); - -module.exports = AWS.Backup; +const parseErrorBody = async (errorBody, context) => { + const value = await parseBody(errorBody, context); + value.message = value.message ?? 
value.Message; + return value; +}; +const loadRestJsonErrorCode = (output, data) => { + const findKey = (object, key) => Object.keys(object).find((k) => k.toLowerCase() === key.toLowerCase()); + const sanitizeErrorCode = (rawValue) => { + let cleanValue = rawValue; + if (typeof cleanValue === "number") { + cleanValue = cleanValue.toString(); + } + if (cleanValue.indexOf(",") >= 0) { + cleanValue = cleanValue.split(",")[0]; + } + if (cleanValue.indexOf(":") >= 0) { + cleanValue = cleanValue.split(":")[0]; + } + if (cleanValue.indexOf("#") >= 0) { + cleanValue = cleanValue.split("#")[1]; + } + return cleanValue; + }; + const headerKey = findKey(output.headers, "x-amzn-errortype"); + if (headerKey !== undefined) { + return sanitizeErrorCode(output.headers[headerKey]); + } + if (data.code !== undefined) { + return sanitizeErrorCode(data.code); + } + if (data["__type"] !== undefined) { + return sanitizeErrorCode(data["__type"]); + } +}; /***/ }), -/***/ 68277: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 19756: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['backupgateway'] = {}; -AWS.BackupGateway = Service.defineService('backupgateway', ['2021-01-01']); -Object.defineProperty(apiLoader.services['backupgateway'], '2021-01-01', { - get: function get() { - var model = __nccwpck_require__(96863); - model.paginators = (__nccwpck_require__(34946)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.BackupGateway; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getRuntimeConfig = void 0; +const tslib_1 = __nccwpck_require__(4351); +const package_json_1 = tslib_1.__importDefault(__nccwpck_require__(91092)); +const util_user_agent_node_1 = __nccwpck_require__(98095); +const config_resolver_1 = __nccwpck_require__(53098); +const hash_node_1 = __nccwpck_require__(3081); +const middleware_retry_1 = __nccwpck_require__(96039); +const node_config_provider_1 = __nccwpck_require__(33461); +const node_http_handler_1 = __nccwpck_require__(20258); +const util_body_length_node_1 = __nccwpck_require__(68075); +const util_retry_1 = __nccwpck_require__(84902); +const runtimeConfig_shared_1 = __nccwpck_require__(44809); +const smithy_client_1 = __nccwpck_require__(63570); +const util_defaults_mode_node_1 = __nccwpck_require__(72429); +const smithy_client_2 = __nccwpck_require__(63570); +const getRuntimeConfig = (config) => { + (0, smithy_client_2.emitWarningIfUnsupportedVersion)(process.version); + const defaultsMode = (0, util_defaults_mode_node_1.resolveDefaultsModeConfig)(config); + const defaultConfigProvider = () => defaultsMode().then(smithy_client_1.loadConfigsForDefaultMode); + const clientSharedValues = (0, runtimeConfig_shared_1.getRuntimeConfig)(config); + return { + ...clientSharedValues, + ...config, + runtime: "node", + defaultsMode, + bodyLengthChecker: config?.bodyLengthChecker ?? util_body_length_node_1.calculateBodyLength, + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + (0, util_user_agent_node_1.defaultUserAgent)({ serviceId: clientSharedValues.serviceId, clientVersion: package_json_1.default.version }), + maxAttempts: config?.maxAttempts ?? (0, node_config_provider_1.loadConfig)(middleware_retry_1.NODE_MAX_ATTEMPT_CONFIG_OPTIONS), + region: config?.region ?? 
(0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_REGION_CONFIG_OPTIONS, config_resolver_1.NODE_REGION_CONFIG_FILE_OPTIONS), + requestHandler: config?.requestHandler ?? new node_http_handler_1.NodeHttpHandler(defaultConfigProvider), + retryMode: config?.retryMode ?? + (0, node_config_provider_1.loadConfig)({ + ...middleware_retry_1.NODE_RETRY_MODE_CONFIG_OPTIONS, + default: async () => (await defaultConfigProvider()).retryMode || util_retry_1.DEFAULT_RETRY_MODE, + }), + sha256: config?.sha256 ?? hash_node_1.Hash.bind(null, "sha256"), + streamCollector: config?.streamCollector ?? node_http_handler_1.streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS), + useFipsEndpoint: config?.useFipsEndpoint ?? (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS), + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; /***/ }), -/***/ 82304: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['backupstorage'] = {}; -AWS.BackupStorage = Service.defineService('backupstorage', ['2018-04-10']); -Object.defineProperty(apiLoader.services['backupstorage'], '2018-04-10', { - get: function get() { - var model = __nccwpck_require__(97436); - model.paginators = (__nccwpck_require__(73644)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +/***/ 44809: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -module.exports = AWS.BackupStorage; +"use strict"; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getRuntimeConfig = void 0; +const smithy_client_1 = __nccwpck_require__(63570); +const url_parser_1 = __nccwpck_require__(14681); +const util_base64_1 = __nccwpck_require__(75600); +const util_utf8_1 = __nccwpck_require__(41895); +const endpointResolver_1 = __nccwpck_require__(30898); +const getRuntimeConfig = (config) => ({ + apiVersion: "2019-06-10", + base64Decoder: config?.base64Decoder ?? util_base64_1.fromBase64, + base64Encoder: config?.base64Encoder ?? util_base64_1.toBase64, + disableHostPrefix: config?.disableHostPrefix ?? false, + endpointProvider: config?.endpointProvider ?? endpointResolver_1.defaultEndpointResolver, + logger: config?.logger ?? new smithy_client_1.NoOpLogger(), + serviceId: config?.serviceId ?? "SSO", + urlParser: config?.urlParser ?? url_parser_1.parseUrl, + utf8Decoder: config?.utf8Decoder ?? util_utf8_1.fromUtf8, + utf8Encoder: config?.utf8Encoder ?? 
util_utf8_1.toUtf8, +}); +exports.getRuntimeConfig = getRuntimeConfig; + + +/***/ }), + +/***/ 32605: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -/***/ }), +"use strict"; -/***/ 10000: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.STS = void 0; +const smithy_client_1 = __nccwpck_require__(63570); +const AssumeRoleCommand_1 = __nccwpck_require__(59802); +const AssumeRoleWithSAMLCommand_1 = __nccwpck_require__(72865); +const AssumeRoleWithWebIdentityCommand_1 = __nccwpck_require__(37451); +const DecodeAuthorizationMessageCommand_1 = __nccwpck_require__(74150); +const GetAccessKeyInfoCommand_1 = __nccwpck_require__(49804); +const GetCallerIdentityCommand_1 = __nccwpck_require__(24278); +const GetFederationTokenCommand_1 = __nccwpck_require__(57552); +const GetSessionTokenCommand_1 = __nccwpck_require__(43285); +const STSClient_1 = __nccwpck_require__(64195); +const commands = { + AssumeRoleCommand: AssumeRoleCommand_1.AssumeRoleCommand, + AssumeRoleWithSAMLCommand: AssumeRoleWithSAMLCommand_1.AssumeRoleWithSAMLCommand, + AssumeRoleWithWebIdentityCommand: AssumeRoleWithWebIdentityCommand_1.AssumeRoleWithWebIdentityCommand, + DecodeAuthorizationMessageCommand: DecodeAuthorizationMessageCommand_1.DecodeAuthorizationMessageCommand, + GetAccessKeyInfoCommand: GetAccessKeyInfoCommand_1.GetAccessKeyInfoCommand, + GetCallerIdentityCommand: GetCallerIdentityCommand_1.GetCallerIdentityCommand, + GetFederationTokenCommand: GetFederationTokenCommand_1.GetFederationTokenCommand, + GetSessionTokenCommand: GetSessionTokenCommand_1.GetSessionTokenCommand, +}; +class STS extends STSClient_1.STSClient { +} +exports.STS = STS; +(0, smithy_client_1.createAggregatedClient)(commands, STS); -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['batch'] = {}; -AWS.Batch = Service.defineService('batch', ['2016-08-10']); -Object.defineProperty(apiLoader.services['batch'], '2016-08-10', { - get: function get() { - var model = __nccwpck_require__(12617); - model.paginators = (__nccwpck_require__(36988)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); -module.exports = AWS.Batch; +/***/ }), +/***/ 64195: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -/***/ }), +"use strict"; -/***/ 38416: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.STSClient = exports.__Client = void 0; +const middleware_host_header_1 = __nccwpck_require__(22545); +const middleware_logger_1 = __nccwpck_require__(20014); +const middleware_recursion_detection_1 = __nccwpck_require__(85525); +const middleware_sdk_sts_1 = __nccwpck_require__(55959); +const middleware_user_agent_1 = __nccwpck_require__(64688); +const config_resolver_1 = __nccwpck_require__(53098); +const middleware_content_length_1 = __nccwpck_require__(82800); +const middleware_endpoint_1 = __nccwpck_require__(82918); +const middleware_retry_1 = __nccwpck_require__(96039); +const smithy_client_1 = __nccwpck_require__(63570); +Object.defineProperty(exports, "__Client", ({ enumerable: true, get: function () { return smithy_client_1.Client; } })); +const EndpointParameters_1 = __nccwpck_require__(20510); +const runtimeConfig_1 = __nccwpck_require__(83405); +class STSClient extends smithy_client_1.Client { + 
constructor(...[configuration]) { + const _config_0 = (0, runtimeConfig_1.getRuntimeConfig)(configuration || {}); + const _config_1 = (0, EndpointParameters_1.resolveClientEndpointParameters)(_config_0); + const _config_2 = (0, config_resolver_1.resolveRegionConfig)(_config_1); + const _config_3 = (0, middleware_endpoint_1.resolveEndpointConfig)(_config_2); + const _config_4 = (0, middleware_retry_1.resolveRetryConfig)(_config_3); + const _config_5 = (0, middleware_host_header_1.resolveHostHeaderConfig)(_config_4); + const _config_6 = (0, middleware_sdk_sts_1.resolveStsAuthConfig)(_config_5, { stsClientCtor: STSClient }); + const _config_7 = (0, middleware_user_agent_1.resolveUserAgentConfig)(_config_6); + super(_config_7); + this.config = _config_7; + this.middlewareStack.use((0, middleware_retry_1.getRetryPlugin)(this.config)); + this.middlewareStack.use((0, middleware_content_length_1.getContentLengthPlugin)(this.config)); + this.middlewareStack.use((0, middleware_host_header_1.getHostHeaderPlugin)(this.config)); + this.middlewareStack.use((0, middleware_logger_1.getLoggerPlugin)(this.config)); + this.middlewareStack.use((0, middleware_recursion_detection_1.getRecursionDetectionPlugin)(this.config)); + this.middlewareStack.use((0, middleware_user_agent_1.getUserAgentPlugin)(this.config)); + } + destroy() { + super.destroy(); + } +} +exports.STSClient = STSClient; + + +/***/ }), + +/***/ 59802: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['billingconductor'] = {}; -AWS.Billingconductor = Service.defineService('billingconductor', ['2021-07-30']); -Object.defineProperty(apiLoader.services['billingconductor'], '2021-07-30', { - get: function get() { - var model = __nccwpck_require__(54862); - model.paginators = (__nccwpck_require__(97894)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(64224)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Billingconductor; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AssumeRoleCommand = exports.$Command = void 0; +const middleware_signing_1 = __nccwpck_require__(14935); +const middleware_endpoint_1 = __nccwpck_require__(82918); +const middleware_serde_1 = __nccwpck_require__(81238); +const smithy_client_1 = __nccwpck_require__(63570); +Object.defineProperty(exports, "$Command", ({ enumerable: true, get: function () { return smithy_client_1.Command; } })); +const models_0_1 = __nccwpck_require__(21780); +const Aws_query_1 = __nccwpck_require__(10740); +class AssumeRoleCommand extends smithy_client_1.Command { + static getEndpointParameterInstructions() { + return { + UseGlobalEndpoint: { type: "builtInParams", name: "useGlobalEndpoint" }, + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" }, + }; + } + constructor(input) { + super(); + this.input = input; + } + resolveMiddleware(clientStack, configuration, options) { + this.middlewareStack.use((0, middleware_serde_1.getSerdePlugin)(configuration, this.serialize, this.deserialize)); + this.middlewareStack.use((0, middleware_endpoint_1.getEndpointPlugin)(configuration, AssumeRoleCommand.getEndpointParameterInstructions())); + 
this.middlewareStack.use((0, middleware_signing_1.getAwsAuthPlugin)(configuration)); + const stack = clientStack.concat(this.middlewareStack); + const { logger } = configuration; + const clientName = "STSClient"; + const commandName = "AssumeRoleCommand"; + const handlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: (_) => _, + outputFilterSensitiveLog: models_0_1.AssumeRoleResponseFilterSensitiveLog, + }; + const { requestHandler } = configuration; + return stack.resolve((request) => requestHandler.handle(request.request, options || {}), handlerExecutionContext); + } + serialize(input, context) { + return (0, Aws_query_1.se_AssumeRoleCommand)(input, context); + } + deserialize(output, context) { + return (0, Aws_query_1.de_AssumeRoleCommand)(output, context); + } +} +exports.AssumeRoleCommand = AssumeRoleCommand; /***/ }), -/***/ 35429: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 72865: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['braket'] = {}; -AWS.Braket = Service.defineService('braket', ['2019-09-01']); -Object.defineProperty(apiLoader.services['braket'], '2019-09-01', { - get: function get() { - var model = __nccwpck_require__(23332); - model.paginators = (__nccwpck_require__(15732)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Braket; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AssumeRoleWithSAMLCommand = exports.$Command = void 0; +const middleware_endpoint_1 = __nccwpck_require__(82918); +const middleware_serde_1 = __nccwpck_require__(81238); +const smithy_client_1 = __nccwpck_require__(63570); +Object.defineProperty(exports, "$Command", ({ enumerable: true, get: function () { return smithy_client_1.Command; } })); +const models_0_1 = __nccwpck_require__(21780); +const Aws_query_1 = __nccwpck_require__(10740); +class AssumeRoleWithSAMLCommand extends smithy_client_1.Command { + static getEndpointParameterInstructions() { + return { + UseGlobalEndpoint: { type: "builtInParams", name: "useGlobalEndpoint" }, + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" }, + }; + } + constructor(input) { + super(); + this.input = input; + } + resolveMiddleware(clientStack, configuration, options) { + this.middlewareStack.use((0, middleware_serde_1.getSerdePlugin)(configuration, this.serialize, this.deserialize)); + this.middlewareStack.use((0, middleware_endpoint_1.getEndpointPlugin)(configuration, AssumeRoleWithSAMLCommand.getEndpointParameterInstructions())); + const stack = clientStack.concat(this.middlewareStack); + const { logger } = configuration; + const clientName = "STSClient"; + const commandName = "AssumeRoleWithSAMLCommand"; + const handlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: models_0_1.AssumeRoleWithSAMLRequestFilterSensitiveLog, + outputFilterSensitiveLog: models_0_1.AssumeRoleWithSAMLResponseFilterSensitiveLog, + }; + const { requestHandler } = configuration; + return stack.resolve((request) => requestHandler.handle(request.request, options || {}), handlerExecutionContext); + } + 
serialize(input, context) { + return (0, Aws_query_1.se_AssumeRoleWithSAMLCommand)(input, context); + } + deserialize(output, context) { + return (0, Aws_query_1.de_AssumeRoleWithSAMLCommand)(output, context); + } +} +exports.AssumeRoleWithSAMLCommand = AssumeRoleWithSAMLCommand; /***/ }), -/***/ 43923: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 37451: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['budgets'] = {}; -AWS.Budgets = Service.defineService('budgets', ['2016-10-20']); -Object.defineProperty(apiLoader.services['budgets'], '2016-10-20', { - get: function get() { - var model = __nccwpck_require__(11978); - model.paginators = (__nccwpck_require__(23694)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Budgets; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AssumeRoleWithWebIdentityCommand = exports.$Command = void 0; +const middleware_endpoint_1 = __nccwpck_require__(82918); +const middleware_serde_1 = __nccwpck_require__(81238); +const smithy_client_1 = __nccwpck_require__(63570); +Object.defineProperty(exports, "$Command", ({ enumerable: true, get: function () { return smithy_client_1.Command; } })); +const models_0_1 = __nccwpck_require__(21780); +const Aws_query_1 = __nccwpck_require__(10740); +class AssumeRoleWithWebIdentityCommand extends smithy_client_1.Command { + static getEndpointParameterInstructions() { + return { + UseGlobalEndpoint: { type: "builtInParams", name: "useGlobalEndpoint" }, + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" }, + }; + } + constructor(input) { + super(); + this.input = input; + } + resolveMiddleware(clientStack, configuration, options) { + this.middlewareStack.use((0, middleware_serde_1.getSerdePlugin)(configuration, this.serialize, this.deserialize)); + this.middlewareStack.use((0, middleware_endpoint_1.getEndpointPlugin)(configuration, AssumeRoleWithWebIdentityCommand.getEndpointParameterInstructions())); + const stack = clientStack.concat(this.middlewareStack); + const { logger } = configuration; + const clientName = "STSClient"; + const commandName = "AssumeRoleWithWebIdentityCommand"; + const handlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: models_0_1.AssumeRoleWithWebIdentityRequestFilterSensitiveLog, + outputFilterSensitiveLog: models_0_1.AssumeRoleWithWebIdentityResponseFilterSensitiveLog, + }; + const { requestHandler } = configuration; + return stack.resolve((request) => requestHandler.handle(request.request, options || {}), handlerExecutionContext); + } + serialize(input, context) { + return (0, Aws_query_1.se_AssumeRoleWithWebIdentityCommand)(input, context); + } + deserialize(output, context) { + return (0, Aws_query_1.de_AssumeRoleWithWebIdentityCommand)(output, context); + } +} +exports.AssumeRoleWithWebIdentityCommand = AssumeRoleWithWebIdentityCommand; /***/ }), -/***/ 84646: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 74150: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = 
__nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['chime'] = {}; -AWS.Chime = Service.defineService('chime', ['2018-05-01']); -Object.defineProperty(apiLoader.services['chime'], '2018-05-01', { - get: function get() { - var model = __nccwpck_require__(44811); - model.paginators = (__nccwpck_require__(31890)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Chime; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.DecodeAuthorizationMessageCommand = exports.$Command = void 0; +const middleware_signing_1 = __nccwpck_require__(14935); +const middleware_endpoint_1 = __nccwpck_require__(82918); +const middleware_serde_1 = __nccwpck_require__(81238); +const smithy_client_1 = __nccwpck_require__(63570); +Object.defineProperty(exports, "$Command", ({ enumerable: true, get: function () { return smithy_client_1.Command; } })); +const Aws_query_1 = __nccwpck_require__(10740); +class DecodeAuthorizationMessageCommand extends smithy_client_1.Command { + static getEndpointParameterInstructions() { + return { + UseGlobalEndpoint: { type: "builtInParams", name: "useGlobalEndpoint" }, + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" }, + }; + } + constructor(input) { + super(); + this.input = input; + } + resolveMiddleware(clientStack, configuration, options) { + this.middlewareStack.use((0, middleware_serde_1.getSerdePlugin)(configuration, this.serialize, this.deserialize)); + this.middlewareStack.use((0, middleware_endpoint_1.getEndpointPlugin)(configuration, DecodeAuthorizationMessageCommand.getEndpointParameterInstructions())); + this.middlewareStack.use((0, middleware_signing_1.getAwsAuthPlugin)(configuration)); + const stack = clientStack.concat(this.middlewareStack); + const { logger } = configuration; + const clientName = "STSClient"; + const commandName = "DecodeAuthorizationMessageCommand"; + const handlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: (_) => _, + outputFilterSensitiveLog: (_) => _, + }; + const { requestHandler } = configuration; + return stack.resolve((request) => requestHandler.handle(request.request, options || {}), handlerExecutionContext); + } + serialize(input, context) { + return (0, Aws_query_1.se_DecodeAuthorizationMessageCommand)(input, context); + } + deserialize(output, context) { + return (0, Aws_query_1.de_DecodeAuthorizationMessageCommand)(output, context); + } +} +exports.DecodeAuthorizationMessageCommand = DecodeAuthorizationMessageCommand; /***/ }), -/***/ 55975: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 49804: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['chimesdkidentity'] = {}; -AWS.ChimeSDKIdentity = Service.defineService('chimesdkidentity', ['2021-04-20']); -Object.defineProperty(apiLoader.services['chimesdkidentity'], '2021-04-20', { - get: function get() { - var model = __nccwpck_require__(97402); - model.paginators = (__nccwpck_require__(133)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = 
AWS.ChimeSDKIdentity; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GetAccessKeyInfoCommand = exports.$Command = void 0; +const middleware_signing_1 = __nccwpck_require__(14935); +const middleware_endpoint_1 = __nccwpck_require__(82918); +const middleware_serde_1 = __nccwpck_require__(81238); +const smithy_client_1 = __nccwpck_require__(63570); +Object.defineProperty(exports, "$Command", ({ enumerable: true, get: function () { return smithy_client_1.Command; } })); +const Aws_query_1 = __nccwpck_require__(10740); +class GetAccessKeyInfoCommand extends smithy_client_1.Command { + static getEndpointParameterInstructions() { + return { + UseGlobalEndpoint: { type: "builtInParams", name: "useGlobalEndpoint" }, + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" }, + }; + } + constructor(input) { + super(); + this.input = input; + } + resolveMiddleware(clientStack, configuration, options) { + this.middlewareStack.use((0, middleware_serde_1.getSerdePlugin)(configuration, this.serialize, this.deserialize)); + this.middlewareStack.use((0, middleware_endpoint_1.getEndpointPlugin)(configuration, GetAccessKeyInfoCommand.getEndpointParameterInstructions())); + this.middlewareStack.use((0, middleware_signing_1.getAwsAuthPlugin)(configuration)); + const stack = clientStack.concat(this.middlewareStack); + const { logger } = configuration; + const clientName = "STSClient"; + const commandName = "GetAccessKeyInfoCommand"; + const handlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: (_) => _, + outputFilterSensitiveLog: (_) => _, + }; + const { requestHandler } = configuration; + return stack.resolve((request) => requestHandler.handle(request.request, options || {}), handlerExecutionContext); + } + serialize(input, context) { + return (0, Aws_query_1.se_GetAccessKeyInfoCommand)(input, context); + } + deserialize(output, context) { + return (0, Aws_query_1.de_GetAccessKeyInfoCommand)(output, context); + } +} +exports.GetAccessKeyInfoCommand = GetAccessKeyInfoCommand; /***/ }), -/***/ 18423: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 24278: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['chimesdkmediapipelines'] = {}; -AWS.ChimeSDKMediaPipelines = Service.defineService('chimesdkmediapipelines', ['2021-07-15']); -Object.defineProperty(apiLoader.services['chimesdkmediapipelines'], '2021-07-15', { - get: function get() { - var model = __nccwpck_require__(14679); - model.paginators = (__nccwpck_require__(82201)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.ChimeSDKMediaPipelines; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GetCallerIdentityCommand = exports.$Command = void 0; +const middleware_signing_1 = __nccwpck_require__(14935); +const middleware_endpoint_1 = __nccwpck_require__(82918); +const middleware_serde_1 = __nccwpck_require__(81238); +const smithy_client_1 = __nccwpck_require__(63570); +Object.defineProperty(exports, "$Command", ({ enumerable: true, get: function () { return smithy_client_1.Command; } })); +const Aws_query_1 = 
__nccwpck_require__(10740); +class GetCallerIdentityCommand extends smithy_client_1.Command { + static getEndpointParameterInstructions() { + return { + UseGlobalEndpoint: { type: "builtInParams", name: "useGlobalEndpoint" }, + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" }, + }; + } + constructor(input) { + super(); + this.input = input; + } + resolveMiddleware(clientStack, configuration, options) { + this.middlewareStack.use((0, middleware_serde_1.getSerdePlugin)(configuration, this.serialize, this.deserialize)); + this.middlewareStack.use((0, middleware_endpoint_1.getEndpointPlugin)(configuration, GetCallerIdentityCommand.getEndpointParameterInstructions())); + this.middlewareStack.use((0, middleware_signing_1.getAwsAuthPlugin)(configuration)); + const stack = clientStack.concat(this.middlewareStack); + const { logger } = configuration; + const clientName = "STSClient"; + const commandName = "GetCallerIdentityCommand"; + const handlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: (_) => _, + outputFilterSensitiveLog: (_) => _, + }; + const { requestHandler } = configuration; + return stack.resolve((request) => requestHandler.handle(request.request, options || {}), handlerExecutionContext); + } + serialize(input, context) { + return (0, Aws_query_1.se_GetCallerIdentityCommand)(input, context); + } + deserialize(output, context) { + return (0, Aws_query_1.de_GetCallerIdentityCommand)(output, context); + } +} +exports.GetCallerIdentityCommand = GetCallerIdentityCommand; /***/ }), -/***/ 80788: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 57552: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['chimesdkmeetings'] = {}; -AWS.ChimeSDKMeetings = Service.defineService('chimesdkmeetings', ['2021-07-15']); -Object.defineProperty(apiLoader.services['chimesdkmeetings'], '2021-07-15', { - get: function get() { - var model = __nccwpck_require__(17090); - model.paginators = (__nccwpck_require__(70582)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.ChimeSDKMeetings; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GetFederationTokenCommand = exports.$Command = void 0; +const middleware_signing_1 = __nccwpck_require__(14935); +const middleware_endpoint_1 = __nccwpck_require__(82918); +const middleware_serde_1 = __nccwpck_require__(81238); +const smithy_client_1 = __nccwpck_require__(63570); +Object.defineProperty(exports, "$Command", ({ enumerable: true, get: function () { return smithy_client_1.Command; } })); +const models_0_1 = __nccwpck_require__(21780); +const Aws_query_1 = __nccwpck_require__(10740); +class GetFederationTokenCommand extends smithy_client_1.Command { + static getEndpointParameterInstructions() { + return { + UseGlobalEndpoint: { type: "builtInParams", name: "useGlobalEndpoint" }, + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" }, + }; + } + constructor(input) 
{ + super(); + this.input = input; + } + resolveMiddleware(clientStack, configuration, options) { + this.middlewareStack.use((0, middleware_serde_1.getSerdePlugin)(configuration, this.serialize, this.deserialize)); + this.middlewareStack.use((0, middleware_endpoint_1.getEndpointPlugin)(configuration, GetFederationTokenCommand.getEndpointParameterInstructions())); + this.middlewareStack.use((0, middleware_signing_1.getAwsAuthPlugin)(configuration)); + const stack = clientStack.concat(this.middlewareStack); + const { logger } = configuration; + const clientName = "STSClient"; + const commandName = "GetFederationTokenCommand"; + const handlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: (_) => _, + outputFilterSensitiveLog: models_0_1.GetFederationTokenResponseFilterSensitiveLog, + }; + const { requestHandler } = configuration; + return stack.resolve((request) => requestHandler.handle(request.request, options || {}), handlerExecutionContext); + } + serialize(input, context) { + return (0, Aws_query_1.se_GetFederationTokenCommand)(input, context); + } + deserialize(output, context) { + return (0, Aws_query_1.de_GetFederationTokenCommand)(output, context); + } +} +exports.GetFederationTokenCommand = GetFederationTokenCommand; /***/ }), -/***/ 25255: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 43285: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['chimesdkmessaging'] = {}; -AWS.ChimeSDKMessaging = Service.defineService('chimesdkmessaging', ['2021-05-15']); -Object.defineProperty(apiLoader.services['chimesdkmessaging'], '2021-05-15', { - get: function get() { - var model = __nccwpck_require__(52239); - model.paginators = (__nccwpck_require__(60807)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.ChimeSDKMessaging; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GetSessionTokenCommand = exports.$Command = void 0; +const middleware_signing_1 = __nccwpck_require__(14935); +const middleware_endpoint_1 = __nccwpck_require__(82918); +const middleware_serde_1 = __nccwpck_require__(81238); +const smithy_client_1 = __nccwpck_require__(63570); +Object.defineProperty(exports, "$Command", ({ enumerable: true, get: function () { return smithy_client_1.Command; } })); +const models_0_1 = __nccwpck_require__(21780); +const Aws_query_1 = __nccwpck_require__(10740); +class GetSessionTokenCommand extends smithy_client_1.Command { + static getEndpointParameterInstructions() { + return { + UseGlobalEndpoint: { type: "builtInParams", name: "useGlobalEndpoint" }, + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" }, + }; + } + constructor(input) { + super(); + this.input = input; + } + resolveMiddleware(clientStack, configuration, options) { + this.middlewareStack.use((0, middleware_serde_1.getSerdePlugin)(configuration, this.serialize, this.deserialize)); + this.middlewareStack.use((0, middleware_endpoint_1.getEndpointPlugin)(configuration, GetSessionTokenCommand.getEndpointParameterInstructions())); + this.middlewareStack.use((0, middleware_signing_1.getAwsAuthPlugin)(configuration)); 
+ const stack = clientStack.concat(this.middlewareStack); + const { logger } = configuration; + const clientName = "STSClient"; + const commandName = "GetSessionTokenCommand"; + const handlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: (_) => _, + outputFilterSensitiveLog: models_0_1.GetSessionTokenResponseFilterSensitiveLog, + }; + const { requestHandler } = configuration; + return stack.resolve((request) => requestHandler.handle(request.request, options || {}), handlerExecutionContext); + } + serialize(input, context) { + return (0, Aws_query_1.se_GetSessionTokenCommand)(input, context); + } + deserialize(output, context) { + return (0, Aws_query_1.de_GetSessionTokenCommand)(output, context); + } +} +exports.GetSessionTokenCommand = GetSessionTokenCommand; /***/ }), -/***/ 349: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 55716: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['chimesdkvoice'] = {}; -AWS.ChimeSDKVoice = Service.defineService('chimesdkvoice', ['2022-08-03']); -Object.defineProperty(apiLoader.services['chimesdkvoice'], '2022-08-03', { - get: function get() { - var model = __nccwpck_require__(26420); - model.paginators = (__nccwpck_require__(7986)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.ChimeSDKVoice; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(59802), exports); +tslib_1.__exportStar(__nccwpck_require__(72865), exports); +tslib_1.__exportStar(__nccwpck_require__(37451), exports); +tslib_1.__exportStar(__nccwpck_require__(74150), exports); +tslib_1.__exportStar(__nccwpck_require__(49804), exports); +tslib_1.__exportStar(__nccwpck_require__(24278), exports); +tslib_1.__exportStar(__nccwpck_require__(57552), exports); +tslib_1.__exportStar(__nccwpck_require__(43285), exports); /***/ }), -/***/ 15130: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 88028: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['cleanrooms'] = {}; -AWS.CleanRooms = Service.defineService('cleanrooms', ['2022-02-17']); -Object.defineProperty(apiLoader.services['cleanrooms'], '2022-02-17', { - get: function get() { - var model = __nccwpck_require__(11585); - model.paginators = (__nccwpck_require__(73060)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(29284)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.CleanRooms; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.decorateDefaultCredentialProvider = exports.getDefaultRoleAssumerWithWebIdentity = exports.getDefaultRoleAssumer = void 0; +const defaultStsRoleAssumers_1 = __nccwpck_require__(90048); +const STSClient_1 = __nccwpck_require__(64195); +const getCustomizableStsClientCtor = (baseCtor, customizations) => { + if (!customizations) + return baseCtor; + else + return class CustomizableSTSClient extends baseCtor { + constructor(config) { + super(config); + for (const customization of customizations) { + 
this.middlewareStack.use(customization); + } + } + }; +}; +const getDefaultRoleAssumer = (stsOptions = {}, stsPlugins) => (0, defaultStsRoleAssumers_1.getDefaultRoleAssumer)(stsOptions, getCustomizableStsClientCtor(STSClient_1.STSClient, stsPlugins)); +exports.getDefaultRoleAssumer = getDefaultRoleAssumer; +const getDefaultRoleAssumerWithWebIdentity = (stsOptions = {}, stsPlugins) => (0, defaultStsRoleAssumers_1.getDefaultRoleAssumerWithWebIdentity)(stsOptions, getCustomizableStsClientCtor(STSClient_1.STSClient, stsPlugins)); +exports.getDefaultRoleAssumerWithWebIdentity = getDefaultRoleAssumerWithWebIdentity; +const decorateDefaultCredentialProvider = (provider) => (input) => provider({ + roleAssumer: (0, exports.getDefaultRoleAssumer)(input), + roleAssumerWithWebIdentity: (0, exports.getDefaultRoleAssumerWithWebIdentity)(input), + ...input, +}); +exports.decorateDefaultCredentialProvider = decorateDefaultCredentialProvider; /***/ }), -/***/ 85473: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['cloud9'] = {}; -AWS.Cloud9 = Service.defineService('cloud9', ['2017-09-23']); -Object.defineProperty(apiLoader.services['cloud9'], '2017-09-23', { - get: function get() { - var model = __nccwpck_require__(82981); - model.paginators = (__nccwpck_require__(9313)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); - -module.exports = AWS.Cloud9; +/***/ 90048: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +"use strict"; -/***/ }), - -/***/ 25630: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['cloudcontrol'] = {}; -AWS.CloudControl = Service.defineService('cloudcontrol', ['2021-09-30']); -Object.defineProperty(apiLoader.services['cloudcontrol'], '2021-09-30', { - get: function get() { - var model = __nccwpck_require__(24689); - model.paginators = (__nccwpck_require__(16041)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(31933)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); - -module.exports = AWS.CloudControl; - - -/***/ }), - -/***/ 56231: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['clouddirectory'] = {}; -AWS.CloudDirectory = Service.defineService('clouddirectory', ['2016-05-10', '2016-05-10*', '2017-01-11']); -Object.defineProperty(apiLoader.services['clouddirectory'], '2016-05-10', { - get: function get() { - var model = __nccwpck_require__(72862); - model.paginators = (__nccwpck_require__(87597)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); -Object.defineProperty(apiLoader.services['clouddirectory'], '2017-01-11', { - get: function get() { - var model = __nccwpck_require__(88729); - model.paginators = (__nccwpck_require__(10156)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); - -module.exports = AWS.CloudDirectory; - - -/***/ }), - -/***/ 74643: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var 
Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['cloudformation'] = {}; -AWS.CloudFormation = Service.defineService('cloudformation', ['2010-05-15']); -Object.defineProperty(apiLoader.services['cloudformation'], '2010-05-15', { - get: function get() { - var model = __nccwpck_require__(31930); - model.paginators = (__nccwpck_require__(10611)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(53732)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); - -module.exports = AWS.CloudFormation; - - -/***/ }), - -/***/ 48058: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['cloudfront'] = {}; -AWS.CloudFront = Service.defineService('cloudfront', ['2013-05-12*', '2013-11-11*', '2014-05-31*', '2014-10-21*', '2014-11-06*', '2015-04-17*', '2015-07-27*', '2015-09-17*', '2016-01-13*', '2016-01-28*', '2016-08-01*', '2016-08-20*', '2016-09-07*', '2016-09-29*', '2016-11-25', '2016-11-25*', '2017-03-25', '2017-03-25*', '2017-10-30', '2017-10-30*', '2018-06-18', '2018-06-18*', '2018-11-05', '2018-11-05*', '2019-03-26', '2019-03-26*', '2020-05-31']); -__nccwpck_require__(95483); -Object.defineProperty(apiLoader.services['cloudfront'], '2016-11-25', { - get: function get() { - var model = __nccwpck_require__(64908); - model.paginators = (__nccwpck_require__(57305)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(71106)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); -Object.defineProperty(apiLoader.services['cloudfront'], '2017-03-25', { - get: function get() { - var model = __nccwpck_require__(76944); - model.paginators = (__nccwpck_require__(83654)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(83406)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); -Object.defineProperty(apiLoader.services['cloudfront'], '2017-10-30', { - get: function get() { - var model = __nccwpck_require__(80198); - model.paginators = (__nccwpck_require__(52915)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(13399)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); -Object.defineProperty(apiLoader.services['cloudfront'], '2018-06-18', { - get: function get() { - var model = __nccwpck_require__(29549); - model.paginators = (__nccwpck_require__(7805)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(2353)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); -Object.defineProperty(apiLoader.services['cloudfront'], '2018-11-05', { - get: function get() { - var model = __nccwpck_require__(22253); - model.paginators = (__nccwpck_require__(29533)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(36883)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); -Object.defineProperty(apiLoader.services['cloudfront'], '2019-03-26', { - get: function get() { - var model = __nccwpck_require__(29574); - model.paginators = (__nccwpck_require__(35556)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(97142)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); -Object.defineProperty(apiLoader.services['cloudfront'], '2020-05-31', { - get: function get() { - var model = __nccwpck_require__(66310); - model.paginators = 
(__nccwpck_require__(48335)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(83517)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); - -module.exports = AWS.CloudFront; - - -/***/ }), - -/***/ 59976: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['cloudhsm'] = {}; -AWS.CloudHSM = Service.defineService('cloudhsm', ['2014-05-30']); -Object.defineProperty(apiLoader.services['cloudhsm'], '2014-05-30', { - get: function get() { - var model = __nccwpck_require__(18637); - model.paginators = (__nccwpck_require__(18988)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); - -module.exports = AWS.CloudHSM; - - -/***/ }), - -/***/ 70889: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['cloudhsmv2'] = {}; -AWS.CloudHSMV2 = Service.defineService('cloudhsmv2', ['2017-04-28']); -Object.defineProperty(apiLoader.services['cloudhsmv2'], '2017-04-28', { - get: function get() { - var model = __nccwpck_require__(90554); - model.paginators = (__nccwpck_require__(77334)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); - -module.exports = AWS.CloudHSMV2; - - -/***/ }), - -/***/ 72321: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['cloudsearch'] = {}; -AWS.CloudSearch = Service.defineService('cloudsearch', ['2011-02-01', '2013-01-01']); -Object.defineProperty(apiLoader.services['cloudsearch'], '2011-02-01', { - get: function get() { - var model = __nccwpck_require__(11732); - model.paginators = (__nccwpck_require__(51357)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); -Object.defineProperty(apiLoader.services['cloudsearch'], '2013-01-01', { - get: function get() { - var model = __nccwpck_require__(56880); - model.paginators = (__nccwpck_require__(81127)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.decorateDefaultCredentialProvider = exports.getDefaultRoleAssumerWithWebIdentity = exports.getDefaultRoleAssumer = void 0; +const AssumeRoleCommand_1 = __nccwpck_require__(59802); +const AssumeRoleWithWebIdentityCommand_1 = __nccwpck_require__(37451); +const ASSUME_ROLE_DEFAULT_REGION = "us-east-1"; +const decorateDefaultRegion = (region) => { + if (typeof region !== "function") { + return region === undefined ? ASSUME_ROLE_DEFAULT_REGION : region; + } + return async () => { + try { + return await region(); + } + catch (e) { + return ASSUME_ROLE_DEFAULT_REGION; + } + }; +}; +const getDefaultRoleAssumer = (stsOptions, stsClientCtor) => { + let stsClient; + let closureSourceCreds; + return async (sourceCreds, params) => { + closureSourceCreds = sourceCreds; + if (!stsClient) { + const { logger, region, requestHandler } = stsOptions; + stsClient = new stsClientCtor({ + logger, + credentialDefaultProvider: () => async () => closureSourceCreds, + region: decorateDefaultRegion(region || stsOptions.region), + ...(requestHandler ? 
{ requestHandler } : {}), + }); + } + const { Credentials } = await stsClient.send(new AssumeRoleCommand_1.AssumeRoleCommand(params)); + if (!Credentials || !Credentials.AccessKeyId || !Credentials.SecretAccessKey) { + throw new Error(`Invalid response from STS.assumeRole call with role ${params.RoleArn}`); + } + return { + accessKeyId: Credentials.AccessKeyId, + secretAccessKey: Credentials.SecretAccessKey, + sessionToken: Credentials.SessionToken, + expiration: Credentials.Expiration, + }; + }; +}; +exports.getDefaultRoleAssumer = getDefaultRoleAssumer; +const getDefaultRoleAssumerWithWebIdentity = (stsOptions, stsClientCtor) => { + let stsClient; + return async (params) => { + if (!stsClient) { + const { logger, region, requestHandler } = stsOptions; + stsClient = new stsClientCtor({ + logger, + region: decorateDefaultRegion(region || stsOptions.region), + ...(requestHandler ? { requestHandler } : {}), + }); + } + const { Credentials } = await stsClient.send(new AssumeRoleWithWebIdentityCommand_1.AssumeRoleWithWebIdentityCommand(params)); + if (!Credentials || !Credentials.AccessKeyId || !Credentials.SecretAccessKey) { + throw new Error(`Invalid response from STS.assumeRoleWithWebIdentity call with role ${params.RoleArn}`); + } + return { + accessKeyId: Credentials.AccessKeyId, + secretAccessKey: Credentials.SecretAccessKey, + sessionToken: Credentials.SessionToken, + expiration: Credentials.Expiration, + }; + }; +}; +exports.getDefaultRoleAssumerWithWebIdentity = getDefaultRoleAssumerWithWebIdentity; +const decorateDefaultCredentialProvider = (provider) => (input) => provider({ + roleAssumer: (0, exports.getDefaultRoleAssumer)(input, input.stsClientCtor), + roleAssumerWithWebIdentity: (0, exports.getDefaultRoleAssumerWithWebIdentity)(input, input.stsClientCtor), + ...input, }); - -module.exports = AWS.CloudSearch; +exports.decorateDefaultCredentialProvider = decorateDefaultCredentialProvider; /***/ }), -/***/ 64072: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 20510: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['cloudsearchdomain'] = {}; -AWS.CloudSearchDomain = Service.defineService('cloudsearchdomain', ['2013-01-01']); -__nccwpck_require__(48571); -Object.defineProperty(apiLoader.services['cloudsearchdomain'], '2013-01-01', { - get: function get() { - var model = __nccwpck_require__(78255); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.CloudSearchDomain; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveClientEndpointParameters = void 0; +const resolveClientEndpointParameters = (options) => { + return { + ...options, + useDualstackEndpoint: options.useDualstackEndpoint ?? false, + useFipsEndpoint: options.useFipsEndpoint ?? false, + useGlobalEndpoint: options.useGlobalEndpoint ?? 
false, + defaultSigningName: "sts", + }; +}; +exports.resolveClientEndpointParameters = resolveClientEndpointParameters; /***/ }), -/***/ 65512: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 41203: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['cloudtrail'] = {}; -AWS.CloudTrail = Service.defineService('cloudtrail', ['2013-11-01']); -Object.defineProperty(apiLoader.services['cloudtrail'], '2013-11-01', { - get: function get() { - var model = __nccwpck_require__(11506); - model.paginators = (__nccwpck_require__(27523)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.CloudTrail; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.defaultEndpointResolver = void 0; +const util_endpoints_1 = __nccwpck_require__(13350); +const ruleset_1 = __nccwpck_require__(86882); +const defaultEndpointResolver = (endpointParams, context = {}) => { + return (0, util_endpoints_1.resolveEndpoint)(ruleset_1.ruleSet, { + endpointParams: endpointParams, + logger: context.logger, + }); +}; +exports.defaultEndpointResolver = defaultEndpointResolver; /***/ }), -/***/ 31191: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 86882: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['cloudtraildata'] = {}; -AWS.CloudTrailData = Service.defineService('cloudtraildata', ['2021-08-11']); -Object.defineProperty(apiLoader.services['cloudtraildata'], '2021-08-11', { - get: function get() { - var model = __nccwpck_require__(27372); - model.paginators = (__nccwpck_require__(79223)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.CloudTrailData; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ruleSet = void 0; +const F = "required", G = "type", H = "fn", I = "argv", J = "ref"; +const a = false, b = true, c = "booleanEquals", d = "tree", e = "stringEquals", f = "sigv4", g = "sts", h = "us-east-1", i = "endpoint", j = "https://sts.{Region}.{PartitionResult#dnsSuffix}", k = "error", l = "getAttr", m = { [F]: false, [G]: "String" }, n = { [F]: true, "default": false, [G]: "Boolean" }, o = { [J]: "Endpoint" }, p = { [H]: "isSet", [I]: [{ [J]: "Region" }] }, q = { [J]: "Region" }, r = { [H]: "aws.partition", [I]: [q], "assign": "PartitionResult" }, s = { [J]: "UseFIPS" }, t = { [J]: "UseDualStack" }, u = { "url": "https://sts.amazonaws.com", "properties": { "authSchemes": [{ "name": f, "signingName": g, "signingRegion": h }] }, "headers": {} }, v = {}, w = { "conditions": [{ [H]: e, [I]: [q, "aws-global"] }], [i]: u, [G]: i }, x = { [H]: c, [I]: [s, true] }, y = { [H]: c, [I]: [t, true] }, z = { [H]: c, [I]: [true, { [H]: l, [I]: [{ [J]: "PartitionResult" }, "supportsFIPS"] }] }, A = { [J]: "PartitionResult" }, B = { [H]: c, [I]: [true, { [H]: l, [I]: [A, "supportsDualStack"] }] }, C = [{ [H]: "isSet", [I]: [o] }], D = [x], E = [y]; +const _data = { version: "1.0", parameters: { Region: m, UseDualStack: n, UseFIPS: n, Endpoint: m, UseGlobalEndpoint: n }, rules: [{ conditions: [{ [H]: c, [I]: [{ [J]: "UseGlobalEndpoint" }, b] }, { [H]: "not", [I]: C }, p, r, { [H]: c, 
[I]: [s, a] }, { [H]: c, [I]: [t, a] }], [G]: d, rules: [{ conditions: [{ [H]: e, [I]: [q, "ap-northeast-1"] }], endpoint: u, [G]: i }, { conditions: [{ [H]: e, [I]: [q, "ap-south-1"] }], endpoint: u, [G]: i }, { conditions: [{ [H]: e, [I]: [q, "ap-southeast-1"] }], endpoint: u, [G]: i }, { conditions: [{ [H]: e, [I]: [q, "ap-southeast-2"] }], endpoint: u, [G]: i }, w, { conditions: [{ [H]: e, [I]: [q, "ca-central-1"] }], endpoint: u, [G]: i }, { conditions: [{ [H]: e, [I]: [q, "eu-central-1"] }], endpoint: u, [G]: i }, { conditions: [{ [H]: e, [I]: [q, "eu-north-1"] }], endpoint: u, [G]: i }, { conditions: [{ [H]: e, [I]: [q, "eu-west-1"] }], endpoint: u, [G]: i }, { conditions: [{ [H]: e, [I]: [q, "eu-west-2"] }], endpoint: u, [G]: i }, { conditions: [{ [H]: e, [I]: [q, "eu-west-3"] }], endpoint: u, [G]: i }, { conditions: [{ [H]: e, [I]: [q, "sa-east-1"] }], endpoint: u, [G]: i }, { conditions: [{ [H]: e, [I]: [q, h] }], endpoint: u, [G]: i }, { conditions: [{ [H]: e, [I]: [q, "us-east-2"] }], endpoint: u, [G]: i }, { conditions: [{ [H]: e, [I]: [q, "us-west-1"] }], endpoint: u, [G]: i }, { conditions: [{ [H]: e, [I]: [q, "us-west-2"] }], endpoint: u, [G]: i }, { endpoint: { url: j, properties: { authSchemes: [{ name: f, signingName: g, signingRegion: "{Region}" }] }, headers: v }, [G]: i }] }, { conditions: C, [G]: d, rules: [{ conditions: D, error: "Invalid Configuration: FIPS and custom endpoint are not supported", [G]: k }, { [G]: d, rules: [{ conditions: E, error: "Invalid Configuration: Dualstack and custom endpoint are not supported", [G]: k }, { endpoint: { url: o, properties: v, headers: v }, [G]: i }] }] }, { [G]: d, rules: [{ conditions: [p], [G]: d, rules: [{ conditions: [r], [G]: d, rules: [{ conditions: [x, y], [G]: d, rules: [{ conditions: [z, B], [G]: d, rules: [{ [G]: d, rules: [{ endpoint: { url: "https://sts-fips.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: v, headers: v }, [G]: i }] }] }, { error: "FIPS and DualStack are enabled, but this partition does not support one or both", [G]: k }] }, { conditions: D, [G]: d, rules: [{ conditions: [z], [G]: d, rules: [{ [G]: d, rules: [{ conditions: [{ [H]: e, [I]: ["aws-us-gov", { [H]: l, [I]: [A, "name"] }] }], endpoint: { url: "https://sts.{Region}.amazonaws.com", properties: v, headers: v }, [G]: i }, { endpoint: { url: "https://sts-fips.{Region}.{PartitionResult#dnsSuffix}", properties: v, headers: v }, [G]: i }] }] }, { error: "FIPS is enabled but this partition does not support FIPS", [G]: k }] }, { conditions: E, [G]: d, rules: [{ conditions: [B], [G]: d, rules: [{ [G]: d, rules: [{ endpoint: { url: "https://sts.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: v, headers: v }, [G]: i }] }] }, { error: "DualStack is enabled but this partition does not support DualStack", [G]: k }] }, { [G]: d, rules: [w, { endpoint: { url: j, properties: v, headers: v }, [G]: i }] }] }] }, { error: "Invalid Configuration: Missing Region", [G]: k }] }] }; +exports.ruleSet = _data; /***/ }), -/***/ 6763: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 52209: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['cloudwatch'] = {}; -AWS.CloudWatch = Service.defineService('cloudwatch', ['2010-08-01']); -Object.defineProperty(apiLoader.services['cloudwatch'], '2010-08-01', { - get: function get() { - var model = 
__nccwpck_require__(16363); - model.paginators = (__nccwpck_require__(46675)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(21466)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.CloudWatch; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.STSServiceException = void 0; +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(64195), exports); +tslib_1.__exportStar(__nccwpck_require__(32605), exports); +tslib_1.__exportStar(__nccwpck_require__(55716), exports); +tslib_1.__exportStar(__nccwpck_require__(20106), exports); +tslib_1.__exportStar(__nccwpck_require__(88028), exports); +var STSServiceException_1 = __nccwpck_require__(26450); +Object.defineProperty(exports, "STSServiceException", ({ enumerable: true, get: function () { return STSServiceException_1.STSServiceException; } })); /***/ }), -/***/ 38124: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 26450: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['cloudwatchevents'] = {}; -AWS.CloudWatchEvents = Service.defineService('cloudwatchevents', ['2014-02-03*', '2015-10-07']); -Object.defineProperty(apiLoader.services['cloudwatchevents'], '2015-10-07', { - get: function get() { - var model = __nccwpck_require__(40299); - model.paginators = (__nccwpck_require__(54031)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.CloudWatchEvents; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.STSServiceException = exports.__ServiceException = void 0; +const smithy_client_1 = __nccwpck_require__(63570); +Object.defineProperty(exports, "__ServiceException", ({ enumerable: true, get: function () { return smithy_client_1.ServiceException; } })); +class STSServiceException extends smithy_client_1.ServiceException { + constructor(options) { + super(options); + Object.setPrototypeOf(this, STSServiceException.prototype); + } +} +exports.STSServiceException = STSServiceException; /***/ }), -/***/ 96693: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 20106: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['cloudwatchlogs'] = {}; -AWS.CloudWatchLogs = Service.defineService('cloudwatchlogs', ['2014-03-28']); -Object.defineProperty(apiLoader.services['cloudwatchlogs'], '2014-03-28', { - get: function get() { - var model = __nccwpck_require__(73044); - model.paginators = (__nccwpck_require__(15472)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.CloudWatchLogs; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(21780), exports); /***/ }), -/***/ 91983: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['codeartifact'] = {}; -AWS.CodeArtifact = Service.defineService('codeartifact', 
['2018-09-22']); -Object.defineProperty(apiLoader.services['codeartifact'], '2018-09-22', { - get: function get() { - var model = __nccwpck_require__(87923); - model.paginators = (__nccwpck_require__(40983)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); - -module.exports = AWS.CodeArtifact; - - -/***/ }), +/***/ 21780: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -/***/ 60450: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +"use strict"; -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['codebuild'] = {}; -AWS.CodeBuild = Service.defineService('codebuild', ['2016-10-06']); -Object.defineProperty(apiLoader.services['codebuild'], '2016-10-06', { - get: function get() { - var model = __nccwpck_require__(40893); - model.paginators = (__nccwpck_require__(23010)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GetSessionTokenResponseFilterSensitiveLog = exports.GetFederationTokenResponseFilterSensitiveLog = exports.AssumeRoleWithWebIdentityResponseFilterSensitiveLog = exports.AssumeRoleWithWebIdentityRequestFilterSensitiveLog = exports.AssumeRoleWithSAMLResponseFilterSensitiveLog = exports.AssumeRoleWithSAMLRequestFilterSensitiveLog = exports.AssumeRoleResponseFilterSensitiveLog = exports.CredentialsFilterSensitiveLog = exports.InvalidAuthorizationMessageException = exports.IDPCommunicationErrorException = exports.InvalidIdentityTokenException = exports.IDPRejectedClaimException = exports.RegionDisabledException = exports.PackedPolicyTooLargeException = exports.MalformedPolicyDocumentException = exports.ExpiredTokenException = void 0; +const smithy_client_1 = __nccwpck_require__(63570); +const STSServiceException_1 = __nccwpck_require__(26450); +class ExpiredTokenException extends STSServiceException_1.STSServiceException { + constructor(opts) { + super({ + name: "ExpiredTokenException", + $fault: "client", + ...opts, + }); + this.name = "ExpiredTokenException"; + this.$fault = "client"; + Object.setPrototypeOf(this, ExpiredTokenException.prototype); + } +} +exports.ExpiredTokenException = ExpiredTokenException; +class MalformedPolicyDocumentException extends STSServiceException_1.STSServiceException { + constructor(opts) { + super({ + name: "MalformedPolicyDocumentException", + $fault: "client", + ...opts, + }); + this.name = "MalformedPolicyDocumentException"; + this.$fault = "client"; + Object.setPrototypeOf(this, MalformedPolicyDocumentException.prototype); + } +} +exports.MalformedPolicyDocumentException = MalformedPolicyDocumentException; +class PackedPolicyTooLargeException extends STSServiceException_1.STSServiceException { + constructor(opts) { + super({ + name: "PackedPolicyTooLargeException", + $fault: "client", + ...opts, + }); + this.name = "PackedPolicyTooLargeException"; + this.$fault = "client"; + Object.setPrototypeOf(this, PackedPolicyTooLargeException.prototype); + } +} +exports.PackedPolicyTooLargeException = PackedPolicyTooLargeException; +class RegionDisabledException extends STSServiceException_1.STSServiceException { + constructor(opts) { + super({ + name: "RegionDisabledException", + $fault: "client", + ...opts, + }); + this.name = "RegionDisabledException"; + this.$fault = "client"; + Object.setPrototypeOf(this, RegionDisabledException.prototype); + } +} 
+exports.RegionDisabledException = RegionDisabledException; +class IDPRejectedClaimException extends STSServiceException_1.STSServiceException { + constructor(opts) { + super({ + name: "IDPRejectedClaimException", + $fault: "client", + ...opts, + }); + this.name = "IDPRejectedClaimException"; + this.$fault = "client"; + Object.setPrototypeOf(this, IDPRejectedClaimException.prototype); + } +} +exports.IDPRejectedClaimException = IDPRejectedClaimException; +class InvalidIdentityTokenException extends STSServiceException_1.STSServiceException { + constructor(opts) { + super({ + name: "InvalidIdentityTokenException", + $fault: "client", + ...opts, + }); + this.name = "InvalidIdentityTokenException"; + this.$fault = "client"; + Object.setPrototypeOf(this, InvalidIdentityTokenException.prototype); + } +} +exports.InvalidIdentityTokenException = InvalidIdentityTokenException; +class IDPCommunicationErrorException extends STSServiceException_1.STSServiceException { + constructor(opts) { + super({ + name: "IDPCommunicationErrorException", + $fault: "client", + ...opts, + }); + this.name = "IDPCommunicationErrorException"; + this.$fault = "client"; + Object.setPrototypeOf(this, IDPCommunicationErrorException.prototype); + } +} +exports.IDPCommunicationErrorException = IDPCommunicationErrorException; +class InvalidAuthorizationMessageException extends STSServiceException_1.STSServiceException { + constructor(opts) { + super({ + name: "InvalidAuthorizationMessageException", + $fault: "client", + ...opts, + }); + this.name = "InvalidAuthorizationMessageException"; + this.$fault = "client"; + Object.setPrototypeOf(this, InvalidAuthorizationMessageException.prototype); + } +} +exports.InvalidAuthorizationMessageException = InvalidAuthorizationMessageException; +const CredentialsFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.SecretAccessKey && { SecretAccessKey: smithy_client_1.SENSITIVE_STRING }), }); - -module.exports = AWS.CodeBuild; - - -/***/ }), - -/***/ 19499: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['codecatalyst'] = {}; -AWS.CodeCatalyst = Service.defineService('codecatalyst', ['2022-09-28']); -Object.defineProperty(apiLoader.services['codecatalyst'], '2022-09-28', { - get: function get() { - var model = __nccwpck_require__(22999); - model.paginators = (__nccwpck_require__(14522)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(42522)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true +exports.CredentialsFilterSensitiveLog = CredentialsFilterSensitiveLog; +const AssumeRoleResponseFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.Credentials && { Credentials: (0, exports.CredentialsFilterSensitiveLog)(obj.Credentials) }), }); - -module.exports = AWS.CodeCatalyst; - - -/***/ }), - -/***/ 71323: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['codecommit'] = {}; -AWS.CodeCommit = Service.defineService('codecommit', ['2015-04-13']); -Object.defineProperty(apiLoader.services['codecommit'], '2015-04-13', { - get: function get() { - var model = __nccwpck_require__(57144); - model.paginators = (__nccwpck_require__(62599)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true 
+exports.AssumeRoleResponseFilterSensitiveLog = AssumeRoleResponseFilterSensitiveLog; +const AssumeRoleWithSAMLRequestFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.SAMLAssertion && { SAMLAssertion: smithy_client_1.SENSITIVE_STRING }), }); - -module.exports = AWS.CodeCommit; - - -/***/ }), - -/***/ 54599: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['codedeploy'] = {}; -AWS.CodeDeploy = Service.defineService('codedeploy', ['2014-10-06']); -Object.defineProperty(apiLoader.services['codedeploy'], '2014-10-06', { - get: function get() { - var model = __nccwpck_require__(10967); - model.paginators = (__nccwpck_require__(1917)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(52416)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true +exports.AssumeRoleWithSAMLRequestFilterSensitiveLog = AssumeRoleWithSAMLRequestFilterSensitiveLog; +const AssumeRoleWithSAMLResponseFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.Credentials && { Credentials: (0, exports.CredentialsFilterSensitiveLog)(obj.Credentials) }), }); - -module.exports = AWS.CodeDeploy; - - -/***/ }), - -/***/ 65704: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['codeguruprofiler'] = {}; -AWS.CodeGuruProfiler = Service.defineService('codeguruprofiler', ['2019-07-18']); -Object.defineProperty(apiLoader.services['codeguruprofiler'], '2019-07-18', { - get: function get() { - var model = __nccwpck_require__(34890); - model.paginators = (__nccwpck_require__(25274)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true +exports.AssumeRoleWithSAMLResponseFilterSensitiveLog = AssumeRoleWithSAMLResponseFilterSensitiveLog; +const AssumeRoleWithWebIdentityRequestFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.WebIdentityToken && { WebIdentityToken: smithy_client_1.SENSITIVE_STRING }), }); - -module.exports = AWS.CodeGuruProfiler; - - -/***/ }), - -/***/ 60070: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['codegurureviewer'] = {}; -AWS.CodeGuruReviewer = Service.defineService('codegurureviewer', ['2019-09-19']); -Object.defineProperty(apiLoader.services['codegurureviewer'], '2019-09-19', { - get: function get() { - var model = __nccwpck_require__(66739); - model.paginators = (__nccwpck_require__(37775)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(69276)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true +exports.AssumeRoleWithWebIdentityRequestFilterSensitiveLog = AssumeRoleWithWebIdentityRequestFilterSensitiveLog; +const AssumeRoleWithWebIdentityResponseFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.Credentials && { Credentials: (0, exports.CredentialsFilterSensitiveLog)(obj.Credentials) }), }); - -module.exports = AWS.CodeGuruReviewer; - - -/***/ }), - -/***/ 22938: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - 
-apiLoader.services['codepipeline'] = {}; -AWS.CodePipeline = Service.defineService('codepipeline', ['2015-07-09']); -Object.defineProperty(apiLoader.services['codepipeline'], '2015-07-09', { - get: function get() { - var model = __nccwpck_require__(4039); - model.paginators = (__nccwpck_require__(78953)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true +exports.AssumeRoleWithWebIdentityResponseFilterSensitiveLog = AssumeRoleWithWebIdentityResponseFilterSensitiveLog; +const GetFederationTokenResponseFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.Credentials && { Credentials: (0, exports.CredentialsFilterSensitiveLog)(obj.Credentials) }), }); - -module.exports = AWS.CodePipeline; - - -/***/ }), - -/***/ 98336: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['codestar'] = {}; -AWS.CodeStar = Service.defineService('codestar', ['2017-04-19']); -Object.defineProperty(apiLoader.services['codestar'], '2017-04-19', { - get: function get() { - var model = __nccwpck_require__(12425); - model.paginators = (__nccwpck_require__(70046)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true +exports.GetFederationTokenResponseFilterSensitiveLog = GetFederationTokenResponseFilterSensitiveLog; +const GetSessionTokenResponseFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.Credentials && { Credentials: (0, exports.CredentialsFilterSensitiveLog)(obj.Credentials) }), }); - -module.exports = AWS.CodeStar; +exports.GetSessionTokenResponseFilterSensitiveLog = GetSessionTokenResponseFilterSensitiveLog; /***/ }), -/***/ 78270: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['codestarconnections'] = {}; -AWS.CodeStarconnections = Service.defineService('codestarconnections', ['2019-12-01']); -Object.defineProperty(apiLoader.services['codestarconnections'], '2019-12-01', { - get: function get() { - var model = __nccwpck_require__(88428); - model.paginators = (__nccwpck_require__(31506)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); - -module.exports = AWS.CodeStarconnections; +/***/ 10740: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +"use strict"; -/***/ }), - -/***/ 15141: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['codestarnotifications'] = {}; -AWS.CodeStarNotifications = Service.defineService('codestarnotifications', ['2019-10-15']); -Object.defineProperty(apiLoader.services['codestarnotifications'], '2019-10-15', { - get: function get() { - var model = __nccwpck_require__(33362); - model.paginators = (__nccwpck_require__(44301)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.de_GetSessionTokenCommand = exports.de_GetFederationTokenCommand = exports.de_GetCallerIdentityCommand = exports.de_GetAccessKeyInfoCommand = exports.de_DecodeAuthorizationMessageCommand = exports.de_AssumeRoleWithWebIdentityCommand = exports.de_AssumeRoleWithSAMLCommand = 
exports.de_AssumeRoleCommand = exports.se_GetSessionTokenCommand = exports.se_GetFederationTokenCommand = exports.se_GetCallerIdentityCommand = exports.se_GetAccessKeyInfoCommand = exports.se_DecodeAuthorizationMessageCommand = exports.se_AssumeRoleWithWebIdentityCommand = exports.se_AssumeRoleWithSAMLCommand = exports.se_AssumeRoleCommand = void 0; +const protocol_http_1 = __nccwpck_require__(64418); +const smithy_client_1 = __nccwpck_require__(63570); +const fast_xml_parser_1 = __nccwpck_require__(12603); +const models_0_1 = __nccwpck_require__(21780); +const STSServiceException_1 = __nccwpck_require__(26450); +const se_AssumeRoleCommand = async (input, context) => { + const headers = SHARED_HEADERS; + let body; + body = buildFormUrlencodedString({ + ...se_AssumeRoleRequest(input, context), + Action: "AssumeRole", + Version: "2011-06-15", + }); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +exports.se_AssumeRoleCommand = se_AssumeRoleCommand; +const se_AssumeRoleWithSAMLCommand = async (input, context) => { + const headers = SHARED_HEADERS; + let body; + body = buildFormUrlencodedString({ + ...se_AssumeRoleWithSAMLRequest(input, context), + Action: "AssumeRoleWithSAML", + Version: "2011-06-15", + }); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +exports.se_AssumeRoleWithSAMLCommand = se_AssumeRoleWithSAMLCommand; +const se_AssumeRoleWithWebIdentityCommand = async (input, context) => { + const headers = SHARED_HEADERS; + let body; + body = buildFormUrlencodedString({ + ...se_AssumeRoleWithWebIdentityRequest(input, context), + Action: "AssumeRoleWithWebIdentity", + Version: "2011-06-15", + }); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +exports.se_AssumeRoleWithWebIdentityCommand = se_AssumeRoleWithWebIdentityCommand; +const se_DecodeAuthorizationMessageCommand = async (input, context) => { + const headers = SHARED_HEADERS; + let body; + body = buildFormUrlencodedString({ + ...se_DecodeAuthorizationMessageRequest(input, context), + Action: "DecodeAuthorizationMessage", + Version: "2011-06-15", + }); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +exports.se_DecodeAuthorizationMessageCommand = se_DecodeAuthorizationMessageCommand; +const se_GetAccessKeyInfoCommand = async (input, context) => { + const headers = SHARED_HEADERS; + let body; + body = buildFormUrlencodedString({ + ...se_GetAccessKeyInfoRequest(input, context), + Action: "GetAccessKeyInfo", + Version: "2011-06-15", + }); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +exports.se_GetAccessKeyInfoCommand = se_GetAccessKeyInfoCommand; +const se_GetCallerIdentityCommand = async (input, context) => { + const headers = SHARED_HEADERS; + let body; + body = buildFormUrlencodedString({ + ...se_GetCallerIdentityRequest(input, context), + Action: "GetCallerIdentity", + Version: "2011-06-15", + }); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +exports.se_GetCallerIdentityCommand = se_GetCallerIdentityCommand; +const se_GetFederationTokenCommand = async (input, context) => { + const headers = SHARED_HEADERS; + let body; + body = buildFormUrlencodedString({ + ...se_GetFederationTokenRequest(input, context), + Action: "GetFederationToken", + Version: "2011-06-15", + }); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +exports.se_GetFederationTokenCommand = se_GetFederationTokenCommand; +const se_GetSessionTokenCommand = async (input, context) => { + 
const headers = SHARED_HEADERS; + let body; + body = buildFormUrlencodedString({ + ...se_GetSessionTokenRequest(input, context), + Action: "GetSessionToken", + Version: "2011-06-15", + }); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +exports.se_GetSessionTokenCommand = se_GetSessionTokenCommand; +const de_AssumeRoleCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_AssumeRoleCommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_AssumeRoleResponse(data.AssumeRoleResult, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +exports.de_AssumeRoleCommand = de_AssumeRoleCommand; +const de_AssumeRoleCommandError = async (output, context) => { + const parsedOutput = { + ...output, + body: await parseErrorBody(output.body, context), + }; + const errorCode = loadQueryErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "ExpiredTokenException": + case "com.amazonaws.sts#ExpiredTokenException": + throw await de_ExpiredTokenExceptionRes(parsedOutput, context); + case "MalformedPolicyDocument": + case "com.amazonaws.sts#MalformedPolicyDocumentException": + throw await de_MalformedPolicyDocumentExceptionRes(parsedOutput, context); + case "PackedPolicyTooLarge": + case "com.amazonaws.sts#PackedPolicyTooLargeException": + throw await de_PackedPolicyTooLargeExceptionRes(parsedOutput, context); + case "RegionDisabledException": + case "com.amazonaws.sts#RegionDisabledException": + throw await de_RegionDisabledExceptionRes(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody: parsedBody.Error, + errorCode, + }); + } +}; +const de_AssumeRoleWithSAMLCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_AssumeRoleWithSAMLCommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_AssumeRoleWithSAMLResponse(data.AssumeRoleWithSAMLResult, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +exports.de_AssumeRoleWithSAMLCommand = de_AssumeRoleWithSAMLCommand; +const de_AssumeRoleWithSAMLCommandError = async (output, context) => { + const parsedOutput = { + ...output, + body: await parseErrorBody(output.body, context), + }; + const errorCode = loadQueryErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "ExpiredTokenException": + case "com.amazonaws.sts#ExpiredTokenException": + throw await de_ExpiredTokenExceptionRes(parsedOutput, context); + case "IDPRejectedClaim": + case "com.amazonaws.sts#IDPRejectedClaimException": + throw await de_IDPRejectedClaimExceptionRes(parsedOutput, context); + case "InvalidIdentityToken": + case "com.amazonaws.sts#InvalidIdentityTokenException": + throw await de_InvalidIdentityTokenExceptionRes(parsedOutput, context); + case "MalformedPolicyDocument": + case "com.amazonaws.sts#MalformedPolicyDocumentException": + throw await de_MalformedPolicyDocumentExceptionRes(parsedOutput, context); + case "PackedPolicyTooLarge": + case "com.amazonaws.sts#PackedPolicyTooLargeException": + throw await de_PackedPolicyTooLargeExceptionRes(parsedOutput, context); + case "RegionDisabledException": + case "com.amazonaws.sts#RegionDisabledException": + throw await de_RegionDisabledExceptionRes(parsedOutput, context); + default: + const parsedBody 
= parsedOutput.body; + return throwDefaultError({ + output, + parsedBody: parsedBody.Error, + errorCode, + }); + } +}; +const de_AssumeRoleWithWebIdentityCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_AssumeRoleWithWebIdentityCommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_AssumeRoleWithWebIdentityResponse(data.AssumeRoleWithWebIdentityResult, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +exports.de_AssumeRoleWithWebIdentityCommand = de_AssumeRoleWithWebIdentityCommand; +const de_AssumeRoleWithWebIdentityCommandError = async (output, context) => { + const parsedOutput = { + ...output, + body: await parseErrorBody(output.body, context), + }; + const errorCode = loadQueryErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "ExpiredTokenException": + case "com.amazonaws.sts#ExpiredTokenException": + throw await de_ExpiredTokenExceptionRes(parsedOutput, context); + case "IDPCommunicationError": + case "com.amazonaws.sts#IDPCommunicationErrorException": + throw await de_IDPCommunicationErrorExceptionRes(parsedOutput, context); + case "IDPRejectedClaim": + case "com.amazonaws.sts#IDPRejectedClaimException": + throw await de_IDPRejectedClaimExceptionRes(parsedOutput, context); + case "InvalidIdentityToken": + case "com.amazonaws.sts#InvalidIdentityTokenException": + throw await de_InvalidIdentityTokenExceptionRes(parsedOutput, context); + case "MalformedPolicyDocument": + case "com.amazonaws.sts#MalformedPolicyDocumentException": + throw await de_MalformedPolicyDocumentExceptionRes(parsedOutput, context); + case "PackedPolicyTooLarge": + case "com.amazonaws.sts#PackedPolicyTooLargeException": + throw await de_PackedPolicyTooLargeExceptionRes(parsedOutput, context); + case "RegionDisabledException": + case "com.amazonaws.sts#RegionDisabledException": + throw await de_RegionDisabledExceptionRes(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody: parsedBody.Error, + errorCode, + }); + } +}; +const de_DecodeAuthorizationMessageCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_DecodeAuthorizationMessageCommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_DecodeAuthorizationMessageResponse(data.DecodeAuthorizationMessageResult, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +exports.de_DecodeAuthorizationMessageCommand = de_DecodeAuthorizationMessageCommand; +const de_DecodeAuthorizationMessageCommandError = async (output, context) => { + const parsedOutput = { + ...output, + body: await parseErrorBody(output.body, context), + }; + const errorCode = loadQueryErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "InvalidAuthorizationMessageException": + case "com.amazonaws.sts#InvalidAuthorizationMessageException": + throw await de_InvalidAuthorizationMessageExceptionRes(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody: parsedBody.Error, + errorCode, + }); + } +}; +const de_GetAccessKeyInfoCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_GetAccessKeyInfoCommandError(output, context); + } + const data = await parseBody(output.body, 
context); + let contents = {}; + contents = de_GetAccessKeyInfoResponse(data.GetAccessKeyInfoResult, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +exports.de_GetAccessKeyInfoCommand = de_GetAccessKeyInfoCommand; +const de_GetAccessKeyInfoCommandError = async (output, context) => { + const parsedOutput = { + ...output, + body: await parseErrorBody(output.body, context), + }; + const errorCode = loadQueryErrorCode(output, parsedOutput.body); + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody: parsedBody.Error, + errorCode, + }); +}; +const de_GetCallerIdentityCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_GetCallerIdentityCommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_GetCallerIdentityResponse(data.GetCallerIdentityResult, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +exports.de_GetCallerIdentityCommand = de_GetCallerIdentityCommand; +const de_GetCallerIdentityCommandError = async (output, context) => { + const parsedOutput = { + ...output, + body: await parseErrorBody(output.body, context), + }; + const errorCode = loadQueryErrorCode(output, parsedOutput.body); + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody: parsedBody.Error, + errorCode, + }); +}; +const de_GetFederationTokenCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_GetFederationTokenCommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_GetFederationTokenResponse(data.GetFederationTokenResult, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +exports.de_GetFederationTokenCommand = de_GetFederationTokenCommand; +const de_GetFederationTokenCommandError = async (output, context) => { + const parsedOutput = { + ...output, + body: await parseErrorBody(output.body, context), + }; + const errorCode = loadQueryErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "MalformedPolicyDocument": + case "com.amazonaws.sts#MalformedPolicyDocumentException": + throw await de_MalformedPolicyDocumentExceptionRes(parsedOutput, context); + case "PackedPolicyTooLarge": + case "com.amazonaws.sts#PackedPolicyTooLargeException": + throw await de_PackedPolicyTooLargeExceptionRes(parsedOutput, context); + case "RegionDisabledException": + case "com.amazonaws.sts#RegionDisabledException": + throw await de_RegionDisabledExceptionRes(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody: parsedBody.Error, + errorCode, + }); + } +}; +const de_GetSessionTokenCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_GetSessionTokenCommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_GetSessionTokenResponse(data.GetSessionTokenResult, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +exports.de_GetSessionTokenCommand = de_GetSessionTokenCommand; +const de_GetSessionTokenCommandError = async (output, context) => { + const parsedOutput = { + ...output, + body: await parseErrorBody(output.body, context), + }; 
+ const errorCode = loadQueryErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "RegionDisabledException": + case "com.amazonaws.sts#RegionDisabledException": + throw await de_RegionDisabledExceptionRes(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody: parsedBody.Error, + errorCode, + }); + } +}; +const de_ExpiredTokenExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_ExpiredTokenException(body.Error, context); + const exception = new models_0_1.ExpiredTokenException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return (0, smithy_client_1.decorateServiceException)(exception, body); +}; +const de_IDPCommunicationErrorExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_IDPCommunicationErrorException(body.Error, context); + const exception = new models_0_1.IDPCommunicationErrorException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return (0, smithy_client_1.decorateServiceException)(exception, body); +}; +const de_IDPRejectedClaimExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_IDPRejectedClaimException(body.Error, context); + const exception = new models_0_1.IDPRejectedClaimException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return (0, smithy_client_1.decorateServiceException)(exception, body); +}; +const de_InvalidAuthorizationMessageExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_InvalidAuthorizationMessageException(body.Error, context); + const exception = new models_0_1.InvalidAuthorizationMessageException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return (0, smithy_client_1.decorateServiceException)(exception, body); +}; +const de_InvalidIdentityTokenExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_InvalidIdentityTokenException(body.Error, context); + const exception = new models_0_1.InvalidIdentityTokenException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return (0, smithy_client_1.decorateServiceException)(exception, body); +}; +const de_MalformedPolicyDocumentExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_MalformedPolicyDocumentException(body.Error, context); + const exception = new models_0_1.MalformedPolicyDocumentException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return (0, smithy_client_1.decorateServiceException)(exception, body); +}; +const de_PackedPolicyTooLargeExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_PackedPolicyTooLargeException(body.Error, context); + const exception = new models_0_1.PackedPolicyTooLargeException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return (0, smithy_client_1.decorateServiceException)(exception, body); +}; +const de_RegionDisabledExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_RegionDisabledException(body.Error, context); + const exception = new models_0_1.RegionDisabledException({ + $metadata: deserializeMetadata(parsedOutput), + 
...deserialized, + }); + return (0, smithy_client_1.decorateServiceException)(exception, body); +}; +const se_AssumeRoleRequest = (input, context) => { + const entries = {}; + if (input.RoleArn != null) { + entries["RoleArn"] = input.RoleArn; + } + if (input.RoleSessionName != null) { + entries["RoleSessionName"] = input.RoleSessionName; + } + if (input.PolicyArns != null) { + const memberEntries = se_policyDescriptorListType(input.PolicyArns, context); + if (input.PolicyArns?.length === 0) { + entries.PolicyArns = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `PolicyArns.${key}`; + entries[loc] = value; + }); + } + if (input.Policy != null) { + entries["Policy"] = input.Policy; + } + if (input.DurationSeconds != null) { + entries["DurationSeconds"] = input.DurationSeconds; + } + if (input.Tags != null) { + const memberEntries = se_tagListType(input.Tags, context); + if (input.Tags?.length === 0) { + entries.Tags = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `Tags.${key}`; + entries[loc] = value; + }); + } + if (input.TransitiveTagKeys != null) { + const memberEntries = se_tagKeyListType(input.TransitiveTagKeys, context); + if (input.TransitiveTagKeys?.length === 0) { + entries.TransitiveTagKeys = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `TransitiveTagKeys.${key}`; + entries[loc] = value; + }); + } + if (input.ExternalId != null) { + entries["ExternalId"] = input.ExternalId; + } + if (input.SerialNumber != null) { + entries["SerialNumber"] = input.SerialNumber; + } + if (input.TokenCode != null) { + entries["TokenCode"] = input.TokenCode; + } + if (input.SourceIdentity != null) { + entries["SourceIdentity"] = input.SourceIdentity; + } + if (input.ProvidedContexts != null) { + const memberEntries = se_ProvidedContextsListType(input.ProvidedContexts, context); + if (input.ProvidedContexts?.length === 0) { + entries.ProvidedContexts = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `ProvidedContexts.${key}`; + entries[loc] = value; + }); + } + return entries; +}; +const se_AssumeRoleWithSAMLRequest = (input, context) => { + const entries = {}; + if (input.RoleArn != null) { + entries["RoleArn"] = input.RoleArn; + } + if (input.PrincipalArn != null) { + entries["PrincipalArn"] = input.PrincipalArn; + } + if (input.SAMLAssertion != null) { + entries["SAMLAssertion"] = input.SAMLAssertion; + } + if (input.PolicyArns != null) { + const memberEntries = se_policyDescriptorListType(input.PolicyArns, context); + if (input.PolicyArns?.length === 0) { + entries.PolicyArns = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `PolicyArns.${key}`; + entries[loc] = value; + }); + } + if (input.Policy != null) { + entries["Policy"] = input.Policy; + } + if (input.DurationSeconds != null) { + entries["DurationSeconds"] = input.DurationSeconds; + } + return entries; +}; +const se_AssumeRoleWithWebIdentityRequest = (input, context) => { + const entries = {}; + if (input.RoleArn != null) { + entries["RoleArn"] = input.RoleArn; + } + if (input.RoleSessionName != null) { + entries["RoleSessionName"] = input.RoleSessionName; + } + if (input.WebIdentityToken != null) { + entries["WebIdentityToken"] = input.WebIdentityToken; + } + if (input.ProviderId != null) { + entries["ProviderId"] = input.ProviderId; + } + if (input.PolicyArns != null) { + const memberEntries = se_policyDescriptorListType(input.PolicyArns, context); + if 
(input.PolicyArns?.length === 0) { + entries.PolicyArns = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `PolicyArns.${key}`; + entries[loc] = value; + }); + } + if (input.Policy != null) { + entries["Policy"] = input.Policy; + } + if (input.DurationSeconds != null) { + entries["DurationSeconds"] = input.DurationSeconds; + } + return entries; +}; +const se_DecodeAuthorizationMessageRequest = (input, context) => { + const entries = {}; + if (input.EncodedMessage != null) { + entries["EncodedMessage"] = input.EncodedMessage; + } + return entries; +}; +const se_GetAccessKeyInfoRequest = (input, context) => { + const entries = {}; + if (input.AccessKeyId != null) { + entries["AccessKeyId"] = input.AccessKeyId; + } + return entries; +}; +const se_GetCallerIdentityRequest = (input, context) => { + const entries = {}; + return entries; +}; +const se_GetFederationTokenRequest = (input, context) => { + const entries = {}; + if (input.Name != null) { + entries["Name"] = input.Name; + } + if (input.Policy != null) { + entries["Policy"] = input.Policy; + } + if (input.PolicyArns != null) { + const memberEntries = se_policyDescriptorListType(input.PolicyArns, context); + if (input.PolicyArns?.length === 0) { + entries.PolicyArns = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `PolicyArns.${key}`; + entries[loc] = value; + }); + } + if (input.DurationSeconds != null) { + entries["DurationSeconds"] = input.DurationSeconds; + } + if (input.Tags != null) { + const memberEntries = se_tagListType(input.Tags, context); + if (input.Tags?.length === 0) { + entries.Tags = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `Tags.${key}`; + entries[loc] = value; + }); + } + return entries; +}; +const se_GetSessionTokenRequest = (input, context) => { + const entries = {}; + if (input.DurationSeconds != null) { + entries["DurationSeconds"] = input.DurationSeconds; + } + if (input.SerialNumber != null) { + entries["SerialNumber"] = input.SerialNumber; + } + if (input.TokenCode != null) { + entries["TokenCode"] = input.TokenCode; + } + return entries; +}; +const se_policyDescriptorListType = (input, context) => { + const entries = {}; + let counter = 1; + for (const entry of input) { + if (entry === null) { + continue; + } + const memberEntries = se_PolicyDescriptorType(entry, context); + Object.entries(memberEntries).forEach(([key, value]) => { + entries[`member.${counter}.${key}`] = value; + }); + counter++; + } + return entries; +}; +const se_PolicyDescriptorType = (input, context) => { + const entries = {}; + if (input.arn != null) { + entries["arn"] = input.arn; + } + return entries; +}; +const se_ProvidedContext = (input, context) => { + const entries = {}; + if (input.ProviderArn != null) { + entries["ProviderArn"] = input.ProviderArn; + } + if (input.ContextAssertion != null) { + entries["ContextAssertion"] = input.ContextAssertion; + } + return entries; +}; +const se_ProvidedContextsListType = (input, context) => { + const entries = {}; + let counter = 1; + for (const entry of input) { + if (entry === null) { + continue; + } + const memberEntries = se_ProvidedContext(entry, context); + Object.entries(memberEntries).forEach(([key, value]) => { + entries[`member.${counter}.${key}`] = value; + }); + counter++; + } + return entries; +}; +const se_Tag = (input, context) => { + const entries = {}; + if (input.Key != null) { + entries["Key"] = input.Key; + } + if (input.Value != null) { + entries["Value"] = 
input.Value; + } + return entries; +}; +const se_tagKeyListType = (input, context) => { + const entries = {}; + let counter = 1; + for (const entry of input) { + if (entry === null) { + continue; + } + entries[`member.${counter}`] = entry; + counter++; + } + return entries; +}; +const se_tagListType = (input, context) => { + const entries = {}; + let counter = 1; + for (const entry of input) { + if (entry === null) { + continue; + } + const memberEntries = se_Tag(entry, context); + Object.entries(memberEntries).forEach(([key, value]) => { + entries[`member.${counter}.${key}`] = value; + }); + counter++; + } + return entries; +}; +const de_AssumedRoleUser = (output, context) => { + const contents = {}; + if (output["AssumedRoleId"] !== undefined) { + contents.AssumedRoleId = (0, smithy_client_1.expectString)(output["AssumedRoleId"]); + } + if (output["Arn"] !== undefined) { + contents.Arn = (0, smithy_client_1.expectString)(output["Arn"]); + } + return contents; +}; +const de_AssumeRoleResponse = (output, context) => { + const contents = {}; + if (output["Credentials"] !== undefined) { + contents.Credentials = de_Credentials(output["Credentials"], context); + } + if (output["AssumedRoleUser"] !== undefined) { + contents.AssumedRoleUser = de_AssumedRoleUser(output["AssumedRoleUser"], context); + } + if (output["PackedPolicySize"] !== undefined) { + contents.PackedPolicySize = (0, smithy_client_1.strictParseInt32)(output["PackedPolicySize"]); + } + if (output["SourceIdentity"] !== undefined) { + contents.SourceIdentity = (0, smithy_client_1.expectString)(output["SourceIdentity"]); + } + return contents; +}; +const de_AssumeRoleWithSAMLResponse = (output, context) => { + const contents = {}; + if (output["Credentials"] !== undefined) { + contents.Credentials = de_Credentials(output["Credentials"], context); + } + if (output["AssumedRoleUser"] !== undefined) { + contents.AssumedRoleUser = de_AssumedRoleUser(output["AssumedRoleUser"], context); + } + if (output["PackedPolicySize"] !== undefined) { + contents.PackedPolicySize = (0, smithy_client_1.strictParseInt32)(output["PackedPolicySize"]); + } + if (output["Subject"] !== undefined) { + contents.Subject = (0, smithy_client_1.expectString)(output["Subject"]); + } + if (output["SubjectType"] !== undefined) { + contents.SubjectType = (0, smithy_client_1.expectString)(output["SubjectType"]); + } + if (output["Issuer"] !== undefined) { + contents.Issuer = (0, smithy_client_1.expectString)(output["Issuer"]); + } + if (output["Audience"] !== undefined) { + contents.Audience = (0, smithy_client_1.expectString)(output["Audience"]); + } + if (output["NameQualifier"] !== undefined) { + contents.NameQualifier = (0, smithy_client_1.expectString)(output["NameQualifier"]); + } + if (output["SourceIdentity"] !== undefined) { + contents.SourceIdentity = (0, smithy_client_1.expectString)(output["SourceIdentity"]); + } + return contents; +}; +const de_AssumeRoleWithWebIdentityResponse = (output, context) => { + const contents = {}; + if (output["Credentials"] !== undefined) { + contents.Credentials = de_Credentials(output["Credentials"], context); + } + if (output["SubjectFromWebIdentityToken"] !== undefined) { + contents.SubjectFromWebIdentityToken = (0, smithy_client_1.expectString)(output["SubjectFromWebIdentityToken"]); + } + if (output["AssumedRoleUser"] !== undefined) { + contents.AssumedRoleUser = de_AssumedRoleUser(output["AssumedRoleUser"], context); + } + if (output["PackedPolicySize"] !== undefined) { + contents.PackedPolicySize = (0, 
smithy_client_1.strictParseInt32)(output["PackedPolicySize"]); + } + if (output["Provider"] !== undefined) { + contents.Provider = (0, smithy_client_1.expectString)(output["Provider"]); + } + if (output["Audience"] !== undefined) { + contents.Audience = (0, smithy_client_1.expectString)(output["Audience"]); + } + if (output["SourceIdentity"] !== undefined) { + contents.SourceIdentity = (0, smithy_client_1.expectString)(output["SourceIdentity"]); + } + return contents; +}; +const de_Credentials = (output, context) => { + const contents = {}; + if (output["AccessKeyId"] !== undefined) { + contents.AccessKeyId = (0, smithy_client_1.expectString)(output["AccessKeyId"]); + } + if (output["SecretAccessKey"] !== undefined) { + contents.SecretAccessKey = (0, smithy_client_1.expectString)(output["SecretAccessKey"]); + } + if (output["SessionToken"] !== undefined) { + contents.SessionToken = (0, smithy_client_1.expectString)(output["SessionToken"]); + } + if (output["Expiration"] !== undefined) { + contents.Expiration = (0, smithy_client_1.expectNonNull)((0, smithy_client_1.parseRfc3339DateTimeWithOffset)(output["Expiration"])); + } + return contents; +}; +const de_DecodeAuthorizationMessageResponse = (output, context) => { + const contents = {}; + if (output["DecodedMessage"] !== undefined) { + contents.DecodedMessage = (0, smithy_client_1.expectString)(output["DecodedMessage"]); + } + return contents; +}; +const de_ExpiredTokenException = (output, context) => { + const contents = {}; + if (output["message"] !== undefined) { + contents.message = (0, smithy_client_1.expectString)(output["message"]); + } + return contents; +}; +const de_FederatedUser = (output, context) => { + const contents = {}; + if (output["FederatedUserId"] !== undefined) { + contents.FederatedUserId = (0, smithy_client_1.expectString)(output["FederatedUserId"]); + } + if (output["Arn"] !== undefined) { + contents.Arn = (0, smithy_client_1.expectString)(output["Arn"]); + } + return contents; +}; +const de_GetAccessKeyInfoResponse = (output, context) => { + const contents = {}; + if (output["Account"] !== undefined) { + contents.Account = (0, smithy_client_1.expectString)(output["Account"]); + } + return contents; +}; +const de_GetCallerIdentityResponse = (output, context) => { + const contents = {}; + if (output["UserId"] !== undefined) { + contents.UserId = (0, smithy_client_1.expectString)(output["UserId"]); + } + if (output["Account"] !== undefined) { + contents.Account = (0, smithy_client_1.expectString)(output["Account"]); + } + if (output["Arn"] !== undefined) { + contents.Arn = (0, smithy_client_1.expectString)(output["Arn"]); + } + return contents; +}; +const de_GetFederationTokenResponse = (output, context) => { + const contents = {}; + if (output["Credentials"] !== undefined) { + contents.Credentials = de_Credentials(output["Credentials"], context); + } + if (output["FederatedUser"] !== undefined) { + contents.FederatedUser = de_FederatedUser(output["FederatedUser"], context); + } + if (output["PackedPolicySize"] !== undefined) { + contents.PackedPolicySize = (0, smithy_client_1.strictParseInt32)(output["PackedPolicySize"]); + } + return contents; +}; +const de_GetSessionTokenResponse = (output, context) => { + const contents = {}; + if (output["Credentials"] !== undefined) { + contents.Credentials = de_Credentials(output["Credentials"], context); + } + return contents; +}; +const de_IDPCommunicationErrorException = (output, context) => { + const contents = {}; + if (output["message"] !== undefined) { + 
contents.message = (0, smithy_client_1.expectString)(output["message"]); + } + return contents; +}; +const de_IDPRejectedClaimException = (output, context) => { + const contents = {}; + if (output["message"] !== undefined) { + contents.message = (0, smithy_client_1.expectString)(output["message"]); + } + return contents; +}; +const de_InvalidAuthorizationMessageException = (output, context) => { + const contents = {}; + if (output["message"] !== undefined) { + contents.message = (0, smithy_client_1.expectString)(output["message"]); + } + return contents; +}; +const de_InvalidIdentityTokenException = (output, context) => { + const contents = {}; + if (output["message"] !== undefined) { + contents.message = (0, smithy_client_1.expectString)(output["message"]); + } + return contents; +}; +const de_MalformedPolicyDocumentException = (output, context) => { + const contents = {}; + if (output["message"] !== undefined) { + contents.message = (0, smithy_client_1.expectString)(output["message"]); + } + return contents; +}; +const de_PackedPolicyTooLargeException = (output, context) => { + const contents = {}; + if (output["message"] !== undefined) { + contents.message = (0, smithy_client_1.expectString)(output["message"]); + } + return contents; +}; +const de_RegionDisabledException = (output, context) => { + const contents = {}; + if (output["message"] !== undefined) { + contents.message = (0, smithy_client_1.expectString)(output["message"]); + } + return contents; +}; +const deserializeMetadata = (output) => ({ + httpStatusCode: output.statusCode, + requestId: output.headers["x-amzn-requestid"] ?? output.headers["x-amzn-request-id"] ?? output.headers["x-amz-request-id"], + extendedRequestId: output.headers["x-amz-id-2"], + cfId: output.headers["x-amz-cf-id"], +}); +const collectBodyString = (streamBody, context) => (0, smithy_client_1.collectBody)(streamBody, context).then((body) => context.utf8Encoder(body)); +const throwDefaultError = (0, smithy_client_1.withBaseException)(STSServiceException_1.STSServiceException); +const buildHttpRpcRequest = async (context, headers, path, resolvedHostname, body) => { + const { hostname, protocol = "https", port, path: basePath } = await context.endpoint(); + const contents = { + protocol, + hostname, + port, + method: "POST", + path: basePath.endsWith("/") ? basePath.slice(0, -1) + path : basePath + path, + headers, + }; + if (resolvedHostname !== undefined) { + contents.hostname = resolvedHostname; + } + if (body !== undefined) { + contents.body = body; + } + return new protocol_http_1.HttpRequest(contents); +}; +const SHARED_HEADERS = { + "content-type": "application/x-www-form-urlencoded", +}; +const parseBody = (streamBody, context) => collectBodyString(streamBody, context).then((encoded) => { + if (encoded.length) { + const parser = new fast_xml_parser_1.XMLParser({ + attributeNamePrefix: "", + htmlEntities: true, + ignoreAttributes: false, + ignoreDeclaration: true, + parseTagValue: false, + trimValues: false, + tagValueProcessor: (_, val) => (val.trim() === "" && val.includes("\n") ? 
"" : undefined), + }); + parser.addEntity("#xD", "\r"); + parser.addEntity("#10", "\n"); + const parsedObj = parser.parse(encoded); + const textNodeName = "#text"; + const key = Object.keys(parsedObj)[0]; + const parsedObjToReturn = parsedObj[key]; + if (parsedObjToReturn[textNodeName]) { + parsedObjToReturn[key] = parsedObjToReturn[textNodeName]; + delete parsedObjToReturn[textNodeName]; + } + return (0, smithy_client_1.getValueFromTextNode)(parsedObjToReturn); + } + return {}; }); - -module.exports = AWS.CodeStarNotifications; +const parseErrorBody = async (errorBody, context) => { + const value = await parseBody(errorBody, context); + if (value.Error) { + value.Error.message = value.Error.message ?? value.Error.Message; + } + return value; +}; +const buildFormUrlencodedString = (formEntries) => Object.entries(formEntries) + .map(([key, value]) => (0, smithy_client_1.extendedEncodeURIComponent)(key) + "=" + (0, smithy_client_1.extendedEncodeURIComponent)(value)) + .join("&"); +const loadQueryErrorCode = (output, data) => { + if (data.Error?.Code !== undefined) { + return data.Error.Code; + } + if (output.statusCode == 404) { + return "NotFound"; + } +}; /***/ }), -/***/ 58291: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 83405: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['cognitoidentity'] = {}; -AWS.CognitoIdentity = Service.defineService('cognitoidentity', ['2014-06-30']); -Object.defineProperty(apiLoader.services['cognitoidentity'], '2014-06-30', { - get: function get() { - var model = __nccwpck_require__(57377); - model.paginators = (__nccwpck_require__(85010)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.CognitoIdentity; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getRuntimeConfig = void 0; +const tslib_1 = __nccwpck_require__(4351); +const package_json_1 = tslib_1.__importDefault(__nccwpck_require__(7947)); +const defaultStsRoleAssumers_1 = __nccwpck_require__(90048); +const credential_provider_node_1 = __nccwpck_require__(75531); +const util_user_agent_node_1 = __nccwpck_require__(98095); +const config_resolver_1 = __nccwpck_require__(53098); +const hash_node_1 = __nccwpck_require__(3081); +const middleware_retry_1 = __nccwpck_require__(96039); +const node_config_provider_1 = __nccwpck_require__(33461); +const node_http_handler_1 = __nccwpck_require__(20258); +const util_body_length_node_1 = __nccwpck_require__(68075); +const util_retry_1 = __nccwpck_require__(84902); +const runtimeConfig_shared_1 = __nccwpck_require__(52642); +const smithy_client_1 = __nccwpck_require__(63570); +const util_defaults_mode_node_1 = __nccwpck_require__(72429); +const smithy_client_2 = __nccwpck_require__(63570); +const getRuntimeConfig = (config) => { + (0, smithy_client_2.emitWarningIfUnsupportedVersion)(process.version); + const defaultsMode = (0, util_defaults_mode_node_1.resolveDefaultsModeConfig)(config); + const defaultConfigProvider = () => defaultsMode().then(smithy_client_1.loadConfigsForDefaultMode); + const clientSharedValues = (0, runtimeConfig_shared_1.getRuntimeConfig)(config); + return { + ...clientSharedValues, + ...config, + runtime: "node", + defaultsMode, + bodyLengthChecker: config?.bodyLengthChecker ?? 
util_body_length_node_1.calculateBodyLength, + credentialDefaultProvider: config?.credentialDefaultProvider ?? (0, defaultStsRoleAssumers_1.decorateDefaultCredentialProvider)(credential_provider_node_1.defaultProvider), + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + (0, util_user_agent_node_1.defaultUserAgent)({ serviceId: clientSharedValues.serviceId, clientVersion: package_json_1.default.version }), + maxAttempts: config?.maxAttempts ?? (0, node_config_provider_1.loadConfig)(middleware_retry_1.NODE_MAX_ATTEMPT_CONFIG_OPTIONS), + region: config?.region ?? (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_REGION_CONFIG_OPTIONS, config_resolver_1.NODE_REGION_CONFIG_FILE_OPTIONS), + requestHandler: config?.requestHandler ?? new node_http_handler_1.NodeHttpHandler(defaultConfigProvider), + retryMode: config?.retryMode ?? + (0, node_config_provider_1.loadConfig)({ + ...middleware_retry_1.NODE_RETRY_MODE_CONFIG_OPTIONS, + default: async () => (await defaultConfigProvider()).retryMode || util_retry_1.DEFAULT_RETRY_MODE, + }), + sha256: config?.sha256 ?? hash_node_1.Hash.bind(null, "sha256"), + streamCollector: config?.streamCollector ?? node_http_handler_1.streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS), + useFipsEndpoint: config?.useFipsEndpoint ?? (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS), + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; /***/ }), -/***/ 31379: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 52642: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['cognitoidentityserviceprovider'] = {}; -AWS.CognitoIdentityServiceProvider = Service.defineService('cognitoidentityserviceprovider', ['2016-04-18']); -Object.defineProperty(apiLoader.services['cognitoidentityserviceprovider'], '2016-04-18', { - get: function get() { - var model = __nccwpck_require__(53166); - model.paginators = (__nccwpck_require__(17149)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.CognitoIdentityServiceProvider; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getRuntimeConfig = void 0; +const smithy_client_1 = __nccwpck_require__(63570); +const url_parser_1 = __nccwpck_require__(14681); +const util_base64_1 = __nccwpck_require__(75600); +const util_utf8_1 = __nccwpck_require__(41895); +const endpointResolver_1 = __nccwpck_require__(41203); +const getRuntimeConfig = (config) => ({ + apiVersion: "2011-06-15", + base64Decoder: config?.base64Decoder ?? util_base64_1.fromBase64, + base64Encoder: config?.base64Encoder ?? util_base64_1.toBase64, + disableHostPrefix: config?.disableHostPrefix ?? false, + endpointProvider: config?.endpointProvider ?? endpointResolver_1.defaultEndpointResolver, + logger: config?.logger ?? new smithy_client_1.NoOpLogger(), + serviceId: config?.serviceId ?? "STS", + urlParser: config?.urlParser ?? url_parser_1.parseUrl, + utf8Decoder: config?.utf8Decoder ?? util_utf8_1.fromUtf8, + utf8Encoder: config?.utf8Encoder ?? 
util_utf8_1.toUtf8, +}); +exports.getRuntimeConfig = getRuntimeConfig; + + +/***/ }), + +/***/ 80255: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.fromEnv = exports.ENV_EXPIRATION = exports.ENV_SESSION = exports.ENV_SECRET = exports.ENV_KEY = void 0; +const property_provider_1 = __nccwpck_require__(79721); +exports.ENV_KEY = "AWS_ACCESS_KEY_ID"; +exports.ENV_SECRET = "AWS_SECRET_ACCESS_KEY"; +exports.ENV_SESSION = "AWS_SESSION_TOKEN"; +exports.ENV_EXPIRATION = "AWS_CREDENTIAL_EXPIRATION"; +const fromEnv = () => async () => { + const accessKeyId = process.env[exports.ENV_KEY]; + const secretAccessKey = process.env[exports.ENV_SECRET]; + const sessionToken = process.env[exports.ENV_SESSION]; + const expiry = process.env[exports.ENV_EXPIRATION]; + if (accessKeyId && secretAccessKey) { + return { + accessKeyId, + secretAccessKey, + ...(sessionToken && { sessionToken }), + ...(expiry && { expiration: new Date(expiry) }), + }; + } + throw new property_provider_1.CredentialsProviderError("Unable to find environment variable credentials."); +}; +exports.fromEnv = fromEnv; /***/ }), -/***/ 74770: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 15972: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['cognitosync'] = {}; -AWS.CognitoSync = Service.defineService('cognitosync', ['2014-06-30']); -Object.defineProperty(apiLoader.services['cognitosync'], '2014-06-30', { - get: function get() { - var model = __nccwpck_require__(29128); - model.paginators = (__nccwpck_require__(5865)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.CognitoSync; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(80255), exports); /***/ }), -/***/ 62878: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 55442: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['comprehend'] = {}; -AWS.Comprehend = Service.defineService('comprehend', ['2017-11-27']); -Object.defineProperty(apiLoader.services['comprehend'], '2017-11-27', { - get: function get() { - var model = __nccwpck_require__(24433); - model.paginators = (__nccwpck_require__(82518)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Comprehend; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.fromIni = void 0; +const shared_ini_file_loader_1 = __nccwpck_require__(43507); +const resolveProfileData_1 = __nccwpck_require__(95653); +const fromIni = (init = {}) => async () => { + const profiles = await (0, shared_ini_file_loader_1.parseKnownFiles)(init); + return (0, resolveProfileData_1.resolveProfileData)((0, shared_ini_file_loader_1.getProfileName)(init), profiles, init); +}; +exports.fromIni = fromIni; /***/ }), -/***/ 32349: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 74203: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { 
-__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['comprehendmedical'] = {}; -AWS.ComprehendMedical = Service.defineService('comprehendmedical', ['2018-10-30']); -Object.defineProperty(apiLoader.services['comprehendmedical'], '2018-10-30', { - get: function get() { - var model = __nccwpck_require__(96649); - model.paginators = (__nccwpck_require__(43172)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.ComprehendMedical; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(55442), exports); /***/ }), -/***/ 64459: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 60853: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['computeoptimizer'] = {}; -AWS.ComputeOptimizer = Service.defineService('computeoptimizer', ['2019-11-01']); -Object.defineProperty(apiLoader.services['computeoptimizer'], '2019-11-01', { - get: function get() { - var model = __nccwpck_require__(85802); - model.paginators = (__nccwpck_require__(6831)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.ComputeOptimizer; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveAssumeRoleCredentials = exports.isAssumeRoleProfile = void 0; +const property_provider_1 = __nccwpck_require__(79721); +const shared_ini_file_loader_1 = __nccwpck_require__(43507); +const resolveCredentialSource_1 = __nccwpck_require__(82458); +const resolveProfileData_1 = __nccwpck_require__(95653); +const isAssumeRoleProfile = (arg) => Boolean(arg) && + typeof arg === "object" && + typeof arg.role_arn === "string" && + ["undefined", "string"].indexOf(typeof arg.role_session_name) > -1 && + ["undefined", "string"].indexOf(typeof arg.external_id) > -1 && + ["undefined", "string"].indexOf(typeof arg.mfa_serial) > -1 && + (isAssumeRoleWithSourceProfile(arg) || isAssumeRoleWithProviderProfile(arg)); +exports.isAssumeRoleProfile = isAssumeRoleProfile; +const isAssumeRoleWithSourceProfile = (arg) => typeof arg.source_profile === "string" && typeof arg.credential_source === "undefined"; +const isAssumeRoleWithProviderProfile = (arg) => typeof arg.credential_source === "string" && typeof arg.source_profile === "undefined"; +const resolveAssumeRoleCredentials = async (profileName, profiles, options, visitedProfiles = {}) => { + const data = profiles[profileName]; + if (!options.roleAssumer) { + throw new property_provider_1.CredentialsProviderError(`Profile ${profileName} requires a role to be assumed, but no role assumption callback was provided.`, false); + } + const { source_profile } = data; + if (source_profile && source_profile in visitedProfiles) { + throw new property_provider_1.CredentialsProviderError(`Detected a cycle attempting to resolve credentials for profile` + + ` ${(0, shared_ini_file_loader_1.getProfileName)(options)}. Profiles visited: ` + + Object.keys(visitedProfiles).join(", "), false); + } + const sourceCredsProvider = source_profile + ? 
(0, resolveProfileData_1.resolveProfileData)(source_profile, profiles, options, { + ...visitedProfiles, + [source_profile]: true, + }) + : (0, resolveCredentialSource_1.resolveCredentialSource)(data.credential_source, profileName)(); + const params = { + RoleArn: data.role_arn, + RoleSessionName: data.role_session_name || `aws-sdk-js-${Date.now()}`, + ExternalId: data.external_id, + }; + const { mfa_serial } = data; + if (mfa_serial) { + if (!options.mfaCodeProvider) { + throw new property_provider_1.CredentialsProviderError(`Profile ${profileName} requires multi-factor authentication, but no MFA code callback was provided.`, false); + } + params.SerialNumber = mfa_serial; + params.TokenCode = await options.mfaCodeProvider(mfa_serial); + } + const sourceCreds = await sourceCredsProvider; + return options.roleAssumer(sourceCreds, params); +}; +exports.resolveAssumeRoleCredentials = resolveAssumeRoleCredentials; /***/ }), -/***/ 34061: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 82458: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['configservice'] = {}; -AWS.ConfigService = Service.defineService('configservice', ['2014-11-12']); -Object.defineProperty(apiLoader.services['configservice'], '2014-11-12', { - get: function get() { - var model = __nccwpck_require__(47124); - model.paginators = (__nccwpck_require__(85980)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.ConfigService; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveCredentialSource = void 0; +const credential_provider_env_1 = __nccwpck_require__(15972); +const credential_provider_imds_1 = __nccwpck_require__(7477); +const property_provider_1 = __nccwpck_require__(79721); +const resolveCredentialSource = (credentialSource, profileName) => { + const sourceProvidersMap = { + EcsContainer: credential_provider_imds_1.fromContainerMetadata, + Ec2InstanceMetadata: credential_provider_imds_1.fromInstanceMetadata, + Environment: credential_provider_env_1.fromEnv, + }; + if (credentialSource in sourceProvidersMap) { + return sourceProvidersMap[credentialSource](); + } + else { + throw new property_provider_1.CredentialsProviderError(`Unsupported credential source in profile ${profileName}. 
Got ${credentialSource}, ` + + `expected EcsContainer or Ec2InstanceMetadata or Environment.`); + } +}; +exports.resolveCredentialSource = resolveCredentialSource; /***/ }), -/***/ 13879: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 69993: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['connect'] = {}; -AWS.Connect = Service.defineService('connect', ['2017-08-08']); -Object.defineProperty(apiLoader.services['connect'], '2017-08-08', { - get: function get() { - var model = __nccwpck_require__(54511); - model.paginators = (__nccwpck_require__(19742)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Connect; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveProcessCredentials = exports.isProcessProfile = void 0; +const credential_provider_process_1 = __nccwpck_require__(89969); +const isProcessProfile = (arg) => Boolean(arg) && typeof arg === "object" && typeof arg.credential_process === "string"; +exports.isProcessProfile = isProcessProfile; +const resolveProcessCredentials = async (options, profile) => (0, credential_provider_process_1.fromProcess)({ + ...options, + profile, +})(); +exports.resolveProcessCredentials = resolveProcessCredentials; /***/ }), -/***/ 42789: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 95653: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['connectcampaigns'] = {}; -AWS.ConnectCampaigns = Service.defineService('connectcampaigns', ['2021-01-30']); -Object.defineProperty(apiLoader.services['connectcampaigns'], '2021-01-30', { - get: function get() { - var model = __nccwpck_require__(71566); - model.paginators = (__nccwpck_require__(45198)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.ConnectCampaigns; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveProfileData = void 0; +const property_provider_1 = __nccwpck_require__(79721); +const resolveAssumeRoleCredentials_1 = __nccwpck_require__(60853); +const resolveProcessCredentials_1 = __nccwpck_require__(69993); +const resolveSsoCredentials_1 = __nccwpck_require__(59867); +const resolveStaticCredentials_1 = __nccwpck_require__(33071); +const resolveWebIdentityCredentials_1 = __nccwpck_require__(58342); +const resolveProfileData = async (profileName, profiles, options, visitedProfiles = {}) => { + const data = profiles[profileName]; + if (Object.keys(visitedProfiles).length > 0 && (0, resolveStaticCredentials_1.isStaticCredsProfile)(data)) { + return (0, resolveStaticCredentials_1.resolveStaticCredentials)(data); + } + if ((0, resolveAssumeRoleCredentials_1.isAssumeRoleProfile)(data)) { + return (0, resolveAssumeRoleCredentials_1.resolveAssumeRoleCredentials)(profileName, profiles, options, visitedProfiles); + } + if ((0, resolveStaticCredentials_1.isStaticCredsProfile)(data)) { + return (0, resolveStaticCredentials_1.resolveStaticCredentials)(data); + } + if ((0, resolveWebIdentityCredentials_1.isWebIdentityProfile)(data)) { + return (0, resolveWebIdentityCredentials_1.resolveWebIdentityCredentials)(data, 
options); + } + if ((0, resolveProcessCredentials_1.isProcessProfile)(data)) { + return (0, resolveProcessCredentials_1.resolveProcessCredentials)(options, profileName); + } + if ((0, resolveSsoCredentials_1.isSsoProfile)(data)) { + return (0, resolveSsoCredentials_1.resolveSsoCredentials)(data); + } + throw new property_provider_1.CredentialsProviderError(`Profile ${profileName} could not be found or parsed in shared credentials file.`); +}; +exports.resolveProfileData = resolveProfileData; /***/ }), -/***/ 72223: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 59867: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['connectcases'] = {}; -AWS.ConnectCases = Service.defineService('connectcases', ['2022-10-03']); -Object.defineProperty(apiLoader.services['connectcases'], '2022-10-03', { - get: function get() { - var model = __nccwpck_require__(3923); - model.paginators = (__nccwpck_require__(8429)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.ConnectCases; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveSsoCredentials = exports.isSsoProfile = void 0; +const credential_provider_sso_1 = __nccwpck_require__(26414); +var credential_provider_sso_2 = __nccwpck_require__(26414); +Object.defineProperty(exports, "isSsoProfile", ({ enumerable: true, get: function () { return credential_provider_sso_2.isSsoProfile; } })); +const resolveSsoCredentials = (data) => { + const { sso_start_url, sso_account_id, sso_session, sso_region, sso_role_name } = (0, credential_provider_sso_1.validateSsoProfile)(data); + return (0, credential_provider_sso_1.fromSSO)({ + ssoStartUrl: sso_start_url, + ssoAccountId: sso_account_id, + ssoSession: sso_session, + ssoRegion: sso_region, + ssoRoleName: sso_role_name, + })(); +}; +exports.resolveSsoCredentials = resolveSsoCredentials; /***/ }), -/***/ 41847: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 33071: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['connectcontactlens'] = {}; -AWS.ConnectContactLens = Service.defineService('connectcontactlens', ['2020-08-21']); -Object.defineProperty(apiLoader.services['connectcontactlens'], '2020-08-21', { - get: function get() { - var model = __nccwpck_require__(16527); - model.paginators = (__nccwpck_require__(76658)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.ConnectContactLens; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveStaticCredentials = exports.isStaticCredsProfile = void 0; +const isStaticCredsProfile = (arg) => Boolean(arg) && + typeof arg === "object" && + typeof arg.aws_access_key_id === "string" && + typeof arg.aws_secret_access_key === "string" && + ["undefined", "string"].indexOf(typeof arg.aws_session_token) > -1; +exports.isStaticCredsProfile = isStaticCredsProfile; +const resolveStaticCredentials = (profile) => Promise.resolve({ + accessKeyId: profile.aws_access_key_id, + secretAccessKey: profile.aws_secret_access_key, + sessionToken: profile.aws_session_token, +}); +exports.resolveStaticCredentials = 
resolveStaticCredentials; /***/ }), -/***/ 94198: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 58342: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['connectparticipant'] = {}; -AWS.ConnectParticipant = Service.defineService('connectparticipant', ['2018-09-07']); -Object.defineProperty(apiLoader.services['connectparticipant'], '2018-09-07', { - get: function get() { - var model = __nccwpck_require__(70132); - model.paginators = (__nccwpck_require__(29947)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.ConnectParticipant; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveWebIdentityCredentials = exports.isWebIdentityProfile = void 0; +const credential_provider_web_identity_1 = __nccwpck_require__(15646); +const isWebIdentityProfile = (arg) => Boolean(arg) && + typeof arg === "object" && + typeof arg.web_identity_token_file === "string" && + typeof arg.role_arn === "string" && + ["undefined", "string"].indexOf(typeof arg.role_session_name) > -1; +exports.isWebIdentityProfile = isWebIdentityProfile; +const resolveWebIdentityCredentials = async (profile, options) => (0, credential_provider_web_identity_1.fromTokenFile)({ + webIdentityTokenFile: profile.web_identity_token_file, + roleArn: profile.role_arn, + roleSessionName: profile.role_session_name, + roleAssumerWithWebIdentity: options.roleAssumerWithWebIdentity, +})(); +exports.resolveWebIdentityCredentials = resolveWebIdentityCredentials; /***/ }), -/***/ 77574: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 15560: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['controltower'] = {}; -AWS.ControlTower = Service.defineService('controltower', ['2018-05-10']); -Object.defineProperty(apiLoader.services['controltower'], '2018-05-10', { - get: function get() { - var model = __nccwpck_require__(1095); - model.paginators = (__nccwpck_require__(55167)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.ControlTower; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.defaultProvider = void 0; +const credential_provider_env_1 = __nccwpck_require__(15972); +const credential_provider_ini_1 = __nccwpck_require__(74203); +const credential_provider_process_1 = __nccwpck_require__(89969); +const credential_provider_sso_1 = __nccwpck_require__(26414); +const credential_provider_web_identity_1 = __nccwpck_require__(15646); +const property_provider_1 = __nccwpck_require__(79721); +const shared_ini_file_loader_1 = __nccwpck_require__(43507); +const remoteProvider_1 = __nccwpck_require__(50626); +const defaultProvider = (init = {}) => (0, property_provider_1.memoize)((0, property_provider_1.chain)(...(init.profile || process.env[shared_ini_file_loader_1.ENV_PROFILE] ? 
[] : [(0, credential_provider_env_1.fromEnv)()]), (0, credential_provider_sso_1.fromSSO)(init), (0, credential_provider_ini_1.fromIni)(init), (0, credential_provider_process_1.fromProcess)(init), (0, credential_provider_web_identity_1.fromTokenFile)(init), (0, remoteProvider_1.remoteProvider)(init), async () => { + throw new property_provider_1.CredentialsProviderError("Could not load credentials from any providers", false); +}), (credentials) => credentials.expiration !== undefined && credentials.expiration.getTime() - Date.now() < 300000, (credentials) => credentials.expiration !== undefined); +exports.defaultProvider = defaultProvider; /***/ }), -/***/ 79523: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 75531: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['costexplorer'] = {}; -AWS.CostExplorer = Service.defineService('costexplorer', ['2017-10-25']); -Object.defineProperty(apiLoader.services['costexplorer'], '2017-10-25', { - get: function get() { - var model = __nccwpck_require__(4060); - model.paginators = (__nccwpck_require__(75642)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.CostExplorer; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(15560), exports); /***/ }), -/***/ 5026: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 50626: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['cur'] = {}; -AWS.CUR = Service.defineService('cur', ['2017-01-06']); -Object.defineProperty(apiLoader.services['cur'], '2017-01-06', { - get: function get() { - var model = __nccwpck_require__(46858); - model.paginators = (__nccwpck_require__(40528)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.CUR; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.remoteProvider = exports.ENV_IMDS_DISABLED = void 0; +const credential_provider_imds_1 = __nccwpck_require__(7477); +const property_provider_1 = __nccwpck_require__(79721); +exports.ENV_IMDS_DISABLED = "AWS_EC2_METADATA_DISABLED"; +const remoteProvider = (init) => { + if (process.env[credential_provider_imds_1.ENV_CMDS_RELATIVE_URI] || process.env[credential_provider_imds_1.ENV_CMDS_FULL_URI]) { + return (0, credential_provider_imds_1.fromContainerMetadata)(init); + } + if (process.env[exports.ENV_IMDS_DISABLED]) { + return async () => { + throw new property_provider_1.CredentialsProviderError("EC2 Instance Metadata Service access disabled"); + }; + } + return (0, credential_provider_imds_1.fromInstanceMetadata)(init); +}; +exports.remoteProvider = remoteProvider; /***/ }), -/***/ 28379: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 72650: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['customerprofiles'] = {}; -AWS.CustomerProfiles = Service.defineService('customerprofiles', 
['2020-08-15']); -Object.defineProperty(apiLoader.services['customerprofiles'], '2020-08-15', { - get: function get() { - var model = __nccwpck_require__(56793); - model.paginators = (__nccwpck_require__(53892)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.CustomerProfiles; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.fromProcess = void 0; +const shared_ini_file_loader_1 = __nccwpck_require__(43507); +const resolveProcessCredentials_1 = __nccwpck_require__(74926); +const fromProcess = (init = {}) => async () => { + const profiles = await (0, shared_ini_file_loader_1.parseKnownFiles)(init); + return (0, resolveProcessCredentials_1.resolveProcessCredentials)((0, shared_ini_file_loader_1.getProfileName)(init), profiles); +}; +exports.fromProcess = fromProcess; /***/ }), -/***/ 35846: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 41104: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['databrew'] = {}; -AWS.DataBrew = Service.defineService('databrew', ['2017-07-25']); -Object.defineProperty(apiLoader.services['databrew'], '2017-07-25', { - get: function get() { - var model = __nccwpck_require__(96089); - model.paginators = (__nccwpck_require__(92224)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.DataBrew; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getValidatedProcessCredentials = void 0; +const getValidatedProcessCredentials = (profileName, data) => { + if (data.Version !== 1) { + throw Error(`Profile ${profileName} credential_process did not return Version 1.`); + } + if (data.AccessKeyId === undefined || data.SecretAccessKey === undefined) { + throw Error(`Profile ${profileName} credential_process returned invalid credentials.`); + } + if (data.Expiration) { + const currentTime = new Date(); + const expireTime = new Date(data.Expiration); + if (expireTime < currentTime) { + throw Error(`Profile ${profileName} credential_process returned expired credentials.`); + } + } + return { + accessKeyId: data.AccessKeyId, + secretAccessKey: data.SecretAccessKey, + ...(data.SessionToken && { sessionToken: data.SessionToken }), + ...(data.Expiration && { expiration: new Date(data.Expiration) }), + }; +}; +exports.getValidatedProcessCredentials = getValidatedProcessCredentials; /***/ }), -/***/ 11024: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 89969: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['dataexchange'] = {}; -AWS.DataExchange = Service.defineService('dataexchange', ['2017-07-25']); -Object.defineProperty(apiLoader.services['dataexchange'], '2017-07-25', { - get: function get() { - var model = __nccwpck_require__(42346); - model.paginators = (__nccwpck_require__(55607)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(43176)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.DataExchange; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); 
+tslib_1.__exportStar(__nccwpck_require__(72650), exports); /***/ }), -/***/ 65688: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 74926: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['datapipeline'] = {}; -AWS.DataPipeline = Service.defineService('datapipeline', ['2012-10-29']); -Object.defineProperty(apiLoader.services['datapipeline'], '2012-10-29', { - get: function get() { - var model = __nccwpck_require__(79908); - model.paginators = (__nccwpck_require__(89659)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.DataPipeline; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveProcessCredentials = void 0; +const property_provider_1 = __nccwpck_require__(79721); +const child_process_1 = __nccwpck_require__(32081); +const util_1 = __nccwpck_require__(73837); +const getValidatedProcessCredentials_1 = __nccwpck_require__(41104); +const resolveProcessCredentials = async (profileName, profiles) => { + const profile = profiles[profileName]; + if (profiles[profileName]) { + const credentialProcess = profile["credential_process"]; + if (credentialProcess !== undefined) { + const execPromise = (0, util_1.promisify)(child_process_1.exec); + try { + const { stdout } = await execPromise(credentialProcess); + let data; + try { + data = JSON.parse(stdout.trim()); + } + catch (_a) { + throw Error(`Profile ${profileName} credential_process returned invalid JSON.`); + } + return (0, getValidatedProcessCredentials_1.getValidatedProcessCredentials)(profileName, data); + } + catch (error) { + throw new property_provider_1.CredentialsProviderError(error.message); + } + } + else { + throw new property_provider_1.CredentialsProviderError(`Profile ${profileName} did not contain credential_process.`); + } + } + else { + throw new property_provider_1.CredentialsProviderError(`Profile ${profileName} could not be found in shared credentials file.`); + } +}; +exports.resolveProcessCredentials = resolveProcessCredentials; /***/ }), -/***/ 25308: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 35959: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['datasync'] = {}; -AWS.DataSync = Service.defineService('datasync', ['2018-11-09']); -Object.defineProperty(apiLoader.services['datasync'], '2018-11-09', { - get: function get() { - var model = __nccwpck_require__(93640); - model.paginators = (__nccwpck_require__(80063)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.DataSync; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.fromSSO = void 0; +const property_provider_1 = __nccwpck_require__(79721); +const shared_ini_file_loader_1 = __nccwpck_require__(43507); +const isSsoProfile_1 = __nccwpck_require__(32572); +const resolveSSOCredentials_1 = __nccwpck_require__(94729); +const validateSsoProfile_1 = __nccwpck_require__(48098); +const fromSSO = (init = {}) => async () => { + const { ssoStartUrl, ssoAccountId, ssoRegion, ssoRoleName, ssoClient, ssoSession } = init; + const profileName = (0, 
shared_ini_file_loader_1.getProfileName)(init); + if (!ssoStartUrl && !ssoAccountId && !ssoRegion && !ssoRoleName && !ssoSession) { + const profiles = await (0, shared_ini_file_loader_1.parseKnownFiles)(init); + const profile = profiles[profileName]; + if (!profile) { + throw new property_provider_1.CredentialsProviderError(`Profile ${profileName} was not found.`); + } + if (!(0, isSsoProfile_1.isSsoProfile)(profile)) { + throw new property_provider_1.CredentialsProviderError(`Profile ${profileName} is not configured with SSO credentials.`); + } + if (profile === null || profile === void 0 ? void 0 : profile.sso_session) { + const ssoSessions = await (0, shared_ini_file_loader_1.loadSsoSessionData)(init); + const session = ssoSessions[profile.sso_session]; + const conflictMsg = ` configurations in profile ${profileName} and sso-session ${profile.sso_session}`; + if (ssoRegion && ssoRegion !== session.sso_region) { + throw new property_provider_1.CredentialsProviderError(`Conflicting SSO region` + conflictMsg, false); + } + if (ssoStartUrl && ssoStartUrl !== session.sso_start_url) { + throw new property_provider_1.CredentialsProviderError(`Conflicting SSO start_url` + conflictMsg, false); + } + profile.sso_region = session.sso_region; + profile.sso_start_url = session.sso_start_url; + } + const { sso_start_url, sso_account_id, sso_region, sso_role_name, sso_session } = (0, validateSsoProfile_1.validateSsoProfile)(profile); + return (0, resolveSSOCredentials_1.resolveSSOCredentials)({ + ssoStartUrl: sso_start_url, + ssoSession: sso_session, + ssoAccountId: sso_account_id, + ssoRegion: sso_region, + ssoRoleName: sso_role_name, + ssoClient: ssoClient, + profile: profileName, + }); + } + else if (!ssoStartUrl || !ssoAccountId || !ssoRegion || !ssoRoleName) { + throw new property_provider_1.CredentialsProviderError("Incomplete configuration. 
The fromSSO() argument hash must include " + + '"ssoStartUrl", "ssoAccountId", "ssoRegion", "ssoRoleName"'); + } + else { + return (0, resolveSSOCredentials_1.resolveSSOCredentials)({ + ssoStartUrl, + ssoSession, + ssoAccountId, + ssoRegion, + ssoRoleName, + ssoClient, + profile: profileName, + }); + } +}; +exports.fromSSO = fromSSO; /***/ }), -/***/ 71398: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 26414: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['dax'] = {}; -AWS.DAX = Service.defineService('dax', ['2017-04-19']); -Object.defineProperty(apiLoader.services['dax'], '2017-04-19', { - get: function get() { - var model = __nccwpck_require__(24709); - model.paginators = (__nccwpck_require__(87564)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.DAX; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(35959), exports); +tslib_1.__exportStar(__nccwpck_require__(32572), exports); +tslib_1.__exportStar(__nccwpck_require__(86623), exports); +tslib_1.__exportStar(__nccwpck_require__(48098), exports); /***/ }), -/***/ 60674: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 32572: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['detective'] = {}; -AWS.Detective = Service.defineService('detective', ['2018-10-26']); -Object.defineProperty(apiLoader.services['detective'], '2018-10-26', { - get: function get() { - var model = __nccwpck_require__(25236); - model.paginators = (__nccwpck_require__(46384)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Detective; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isSsoProfile = void 0; +const isSsoProfile = (arg) => arg && + (typeof arg.sso_start_url === "string" || + typeof arg.sso_account_id === "string" || + typeof arg.sso_session === "string" || + typeof arg.sso_region === "string" || + typeof arg.sso_role_name === "string"); +exports.isSsoProfile = isSsoProfile; /***/ }), -/***/ 26272: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 94729: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['devicefarm'] = {}; -AWS.DeviceFarm = Service.defineService('devicefarm', ['2015-06-23']); -Object.defineProperty(apiLoader.services['devicefarm'], '2015-06-23', { - get: function get() { - var model = __nccwpck_require__(34023); - model.paginators = (__nccwpck_require__(37161)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.DeviceFarm; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveSSOCredentials = void 0; +const client_sso_1 = __nccwpck_require__(82666); +const token_providers_1 = __nccwpck_require__(52843); +const property_provider_1 = __nccwpck_require__(79721); +const shared_ini_file_loader_1 = 
__nccwpck_require__(43507); +const EXPIRE_WINDOW_MS = 15 * 60 * 1000; +const SHOULD_FAIL_CREDENTIAL_CHAIN = false; +const resolveSSOCredentials = async ({ ssoStartUrl, ssoSession, ssoAccountId, ssoRegion, ssoRoleName, ssoClient, profile, }) => { + let token; + const refreshMessage = `To refresh this SSO session run aws sso login with the corresponding profile.`; + if (ssoSession) { + try { + const _token = await (0, token_providers_1.fromSso)({ profile })(); + token = { + accessToken: _token.token, + expiresAt: new Date(_token.expiration).toISOString(), + }; + } + catch (e) { + throw new property_provider_1.CredentialsProviderError(e.message, SHOULD_FAIL_CREDENTIAL_CHAIN); + } + } + else { + try { + token = await (0, shared_ini_file_loader_1.getSSOTokenFromFile)(ssoStartUrl); + } + catch (e) { + throw new property_provider_1.CredentialsProviderError(`The SSO session associated with this profile is invalid. ${refreshMessage}`, SHOULD_FAIL_CREDENTIAL_CHAIN); + } + } + if (new Date(token.expiresAt).getTime() - Date.now() <= EXPIRE_WINDOW_MS) { + throw new property_provider_1.CredentialsProviderError(`The SSO session associated with this profile has expired. ${refreshMessage}`, SHOULD_FAIL_CREDENTIAL_CHAIN); + } + const { accessToken } = token; + const sso = ssoClient || new client_sso_1.SSOClient({ region: ssoRegion }); + let ssoResp; + try { + ssoResp = await sso.send(new client_sso_1.GetRoleCredentialsCommand({ + accountId: ssoAccountId, + roleName: ssoRoleName, + accessToken, + })); + } + catch (e) { + throw property_provider_1.CredentialsProviderError.from(e, SHOULD_FAIL_CREDENTIAL_CHAIN); + } + const { roleCredentials: { accessKeyId, secretAccessKey, sessionToken, expiration } = {} } = ssoResp; + if (!accessKeyId || !secretAccessKey || !sessionToken || !expiration) { + throw new property_provider_1.CredentialsProviderError("SSO returns an invalid temporary credential.", SHOULD_FAIL_CREDENTIAL_CHAIN); + } + return { accessKeyId, secretAccessKey, sessionToken, expiration: new Date(expiration) }; +}; +exports.resolveSSOCredentials = resolveSSOCredentials; /***/ }), -/***/ 90673: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 86623: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['devopsguru'] = {}; -AWS.DevOpsGuru = Service.defineService('devopsguru', ['2020-12-01']); -Object.defineProperty(apiLoader.services['devopsguru'], '2020-12-01', { - get: function get() { - var model = __nccwpck_require__(36592); - model.paginators = (__nccwpck_require__(95551)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.DevOpsGuru; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 73783: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 48098: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['directconnect'] = {}; -AWS.DirectConnect = Service.defineService('directconnect', ['2012-10-25']); -Object.defineProperty(apiLoader.services['directconnect'], '2012-10-25', { - get: function get() { - var model = __nccwpck_require__(45125); - model.paginators = (__nccwpck_require__(26404)/* .pagination */ .o); - return model; - }, - enumerable: 
true, - configurable: true -}); +"use strict"; -module.exports = AWS.DirectConnect; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.validateSsoProfile = void 0; +const property_provider_1 = __nccwpck_require__(79721); +const validateSsoProfile = (profile) => { + const { sso_start_url, sso_account_id, sso_region, sso_role_name } = profile; + if (!sso_start_url || !sso_account_id || !sso_region || !sso_role_name) { + throw new property_provider_1.CredentialsProviderError(`Profile is configured with invalid SSO credentials. Required parameters "sso_account_id", ` + + `"sso_region", "sso_role_name", "sso_start_url". Got ${Object.keys(profile).join(", ")}\nReference: https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-sso.html`, false); + } + return profile; +}; +exports.validateSsoProfile = validateSsoProfile; /***/ }), -/***/ 83908: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 35614: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['directoryservice'] = {}; -AWS.DirectoryService = Service.defineService('directoryservice', ['2015-04-16']); -Object.defineProperty(apiLoader.services['directoryservice'], '2015-04-16', { - get: function get() { - var model = __nccwpck_require__(47357); - model.paginators = (__nccwpck_require__(93412)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.DirectoryService; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.fromTokenFile = void 0; +const property_provider_1 = __nccwpck_require__(79721); +const fs_1 = __nccwpck_require__(57147); +const fromWebToken_1 = __nccwpck_require__(47905); +const ENV_TOKEN_FILE = "AWS_WEB_IDENTITY_TOKEN_FILE"; +const ENV_ROLE_ARN = "AWS_ROLE_ARN"; +const ENV_ROLE_SESSION_NAME = "AWS_ROLE_SESSION_NAME"; +const fromTokenFile = (init = {}) => async () => { + var _a, _b, _c; + const webIdentityTokenFile = (_a = init === null || init === void 0 ? void 0 : init.webIdentityTokenFile) !== null && _a !== void 0 ? _a : process.env[ENV_TOKEN_FILE]; + const roleArn = (_b = init === null || init === void 0 ? void 0 : init.roleArn) !== null && _b !== void 0 ? _b : process.env[ENV_ROLE_ARN]; + const roleSessionName = (_c = init === null || init === void 0 ? void 0 : init.roleSessionName) !== null && _c !== void 0 ? 
_c : process.env[ENV_ROLE_SESSION_NAME]; + if (!webIdentityTokenFile || !roleArn) { + throw new property_provider_1.CredentialsProviderError("Web identity configuration not specified"); + } + return (0, fromWebToken_1.fromWebToken)({ + ...init, + webIdentityToken: (0, fs_1.readFileSync)(webIdentityTokenFile, { encoding: "ascii" }), + roleArn, + roleSessionName, + })(); +}; +exports.fromTokenFile = fromTokenFile; /***/ }), -/***/ 81690: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 47905: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['discovery'] = {}; -AWS.Discovery = Service.defineService('discovery', ['2015-11-01']); -Object.defineProperty(apiLoader.services['discovery'], '2015-11-01', { - get: function get() { - var model = __nccwpck_require__(68951); - model.paginators = (__nccwpck_require__(19822)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Discovery; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.fromWebToken = void 0; +const property_provider_1 = __nccwpck_require__(79721); +const fromWebToken = (init) => () => { + const { roleArn, roleSessionName, webIdentityToken, providerId, policyArns, policy, durationSeconds, roleAssumerWithWebIdentity, } = init; + if (!roleAssumerWithWebIdentity) { + throw new property_provider_1.CredentialsProviderError(`Role Arn '${roleArn}' needs to be assumed with web identity,` + + ` but no role assumption callback was provided.`, false); + } + return roleAssumerWithWebIdentity({ + RoleArn: roleArn, + RoleSessionName: roleSessionName !== null && roleSessionName !== void 0 ? 
roleSessionName : `aws-sdk-js-session-${Date.now()}`, + WebIdentityToken: webIdentityToken, + ProviderId: providerId, + PolicyArns: policyArns, + Policy: policy, + DurationSeconds: durationSeconds, + }); +}; +exports.fromWebToken = fromWebToken; /***/ }), -/***/ 24958: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 15646: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['dlm'] = {}; -AWS.DLM = Service.defineService('dlm', ['2018-01-12']); -Object.defineProperty(apiLoader.services['dlm'], '2018-01-12', { - get: function get() { - var model = __nccwpck_require__(75485); - model.paginators = (__nccwpck_require__(98881)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.DLM; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(35614), exports); +tslib_1.__exportStar(__nccwpck_require__(47905), exports); /***/ }), -/***/ 69868: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 22545: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['dms'] = {}; -AWS.DMS = Service.defineService('dms', ['2016-01-01']); -Object.defineProperty(apiLoader.services['dms'], '2016-01-01', { - get: function get() { - var model = __nccwpck_require__(77953); - model.paginators = (__nccwpck_require__(36772)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(3500)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.DMS; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getHostHeaderPlugin = exports.hostHeaderMiddlewareOptions = exports.hostHeaderMiddleware = exports.resolveHostHeaderConfig = void 0; +const protocol_http_1 = __nccwpck_require__(64418); +function resolveHostHeaderConfig(input) { + return input; +} +exports.resolveHostHeaderConfig = resolveHostHeaderConfig; +const hostHeaderMiddleware = (options) => (next) => async (args) => { + if (!protocol_http_1.HttpRequest.isInstance(args.request)) + return next(args); + const { request } = args; + const { handlerProtocol = "" } = options.requestHandler.metadata || {}; + if (handlerProtocol.indexOf("h2") >= 0 && !request.headers[":authority"]) { + delete request.headers["host"]; + request.headers[":authority"] = ""; + } + else if (!request.headers["host"]) { + let host = request.hostname; + if (request.port != null) + host += `:${request.port}`; + request.headers["host"] = host; + } + return next(args); +}; +exports.hostHeaderMiddleware = hostHeaderMiddleware; +exports.hostHeaderMiddlewareOptions = { + name: "hostHeaderMiddleware", + step: "build", + priority: "low", + tags: ["HOST"], + override: true, +}; +const getHostHeaderPlugin = (options) => ({ + applyToStack: (clientStack) => { + clientStack.add((0, exports.hostHeaderMiddleware)(options), exports.hostHeaderMiddlewareOptions); + }, +}); +exports.getHostHeaderPlugin = getHostHeaderPlugin; /***/ }), -/***/ 55129: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 20014: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) 
=> { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['docdb'] = {}; -AWS.DocDB = Service.defineService('docdb', ['2014-10-31']); -__nccwpck_require__(59050); -Object.defineProperty(apiLoader.services['docdb'], '2014-10-31', { - get: function get() { - var model = __nccwpck_require__(4932); - model.paginators = (__nccwpck_require__(41408)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(36607)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.DocDB; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(9754), exports); /***/ }), -/***/ 20792: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 9754: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['docdbelastic'] = {}; -AWS.DocDBElastic = Service.defineService('docdbelastic', ['2022-11-28']); -Object.defineProperty(apiLoader.services['docdbelastic'], '2022-11-28', { - get: function get() { - var model = __nccwpck_require__(34162); - model.paginators = (__nccwpck_require__(89093)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.DocDBElastic; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getLoggerPlugin = exports.loggerMiddlewareOptions = exports.loggerMiddleware = void 0; +const loggerMiddleware = () => (next, context) => async (args) => { + var _a, _b; + try { + const response = await next(args); + const { clientName, commandName, logger, dynamoDbDocumentClientOptions = {} } = context; + const { overrideInputFilterSensitiveLog, overrideOutputFilterSensitiveLog } = dynamoDbDocumentClientOptions; + const inputFilterSensitiveLog = overrideInputFilterSensitiveLog !== null && overrideInputFilterSensitiveLog !== void 0 ? overrideInputFilterSensitiveLog : context.inputFilterSensitiveLog; + const outputFilterSensitiveLog = overrideOutputFilterSensitiveLog !== null && overrideOutputFilterSensitiveLog !== void 0 ? overrideOutputFilterSensitiveLog : context.outputFilterSensitiveLog; + const { $metadata, ...outputWithoutMetadata } = response.output; + (_a = logger === null || logger === void 0 ? void 0 : logger.info) === null || _a === void 0 ? void 0 : _a.call(logger, { + clientName, + commandName, + input: inputFilterSensitiveLog(args.input), + output: outputFilterSensitiveLog(outputWithoutMetadata), + metadata: $metadata, + }); + return response; + } + catch (error) { + const { clientName, commandName, logger, dynamoDbDocumentClientOptions = {} } = context; + const { overrideInputFilterSensitiveLog } = dynamoDbDocumentClientOptions; + const inputFilterSensitiveLog = overrideInputFilterSensitiveLog !== null && overrideInputFilterSensitiveLog !== void 0 ? overrideInputFilterSensitiveLog : context.inputFilterSensitiveLog; + (_b = logger === null || logger === void 0 ? void 0 : logger.error) === null || _b === void 0 ? 
void 0 : _b.call(logger, { + clientName, + commandName, + input: inputFilterSensitiveLog(args.input), + error, + metadata: error.$metadata, + }); + throw error; + } +}; +exports.loggerMiddleware = loggerMiddleware; +exports.loggerMiddlewareOptions = { + name: "loggerMiddleware", + tags: ["LOGGER"], + step: "initialize", + override: true, +}; +const getLoggerPlugin = (options) => ({ + applyToStack: (clientStack) => { + clientStack.add((0, exports.loggerMiddleware)(), exports.loggerMiddlewareOptions); + }, +}); +exports.getLoggerPlugin = getLoggerPlugin; /***/ }), -/***/ 41116: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 85525: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['drs'] = {}; -AWS.Drs = Service.defineService('drs', ['2020-02-26']); -Object.defineProperty(apiLoader.services['drs'], '2020-02-26', { - get: function get() { - var model = __nccwpck_require__(42548); - model.paginators = (__nccwpck_require__(44057)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Drs; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getRecursionDetectionPlugin = exports.addRecursionDetectionMiddlewareOptions = exports.recursionDetectionMiddleware = void 0; +const protocol_http_1 = __nccwpck_require__(64418); +const TRACE_ID_HEADER_NAME = "X-Amzn-Trace-Id"; +const ENV_LAMBDA_FUNCTION_NAME = "AWS_LAMBDA_FUNCTION_NAME"; +const ENV_TRACE_ID = "_X_AMZN_TRACE_ID"; +const recursionDetectionMiddleware = (options) => (next) => async (args) => { + const { request } = args; + if (!protocol_http_1.HttpRequest.isInstance(request) || + options.runtime !== "node" || + request.headers.hasOwnProperty(TRACE_ID_HEADER_NAME)) { + return next(args); + } + const functionName = process.env[ENV_LAMBDA_FUNCTION_NAME]; + const traceId = process.env[ENV_TRACE_ID]; + const nonEmptyString = (str) => typeof str === "string" && str.length > 0; + if (nonEmptyString(functionName) && nonEmptyString(traceId)) { + request.headers[TRACE_ID_HEADER_NAME] = traceId; + } + return next({ + ...args, + request, + }); +}; +exports.recursionDetectionMiddleware = recursionDetectionMiddleware; +exports.addRecursionDetectionMiddlewareOptions = { + step: "build", + tags: ["RECURSION_DETECTION"], + name: "recursionDetectionMiddleware", + override: true, + priority: "low", +}; +const getRecursionDetectionPlugin = (options) => ({ + applyToStack: (clientStack) => { + clientStack.add((0, exports.recursionDetectionMiddleware)(options), exports.addRecursionDetectionMiddlewareOptions); + }, +}); +exports.getRecursionDetectionPlugin = getRecursionDetectionPlugin; /***/ }), -/***/ 14347: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 55959: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['dynamodb'] = {}; -AWS.DynamoDB = Service.defineService('dynamodb', ['2011-12-05', '2012-08-10']); -__nccwpck_require__(17101); -Object.defineProperty(apiLoader.services['dynamodb'], '2011-12-05', { - get: function get() { - var model = __nccwpck_require__(46148); - model.paginators = (__nccwpck_require__(86884)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(24864)/* 
.waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); -Object.defineProperty(apiLoader.services['dynamodb'], '2012-08-10', { - get: function get() { - var model = __nccwpck_require__(54047); - model.paginators = (__nccwpck_require__(30482)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(48411)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.DynamoDB; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveStsAuthConfig = void 0; +const middleware_signing_1 = __nccwpck_require__(14935); +const resolveStsAuthConfig = (input, { stsClientCtor }) => (0, middleware_signing_1.resolveAwsAuthConfig)({ + ...input, + stsClientCtor, +}); +exports.resolveStsAuthConfig = resolveStsAuthConfig; /***/ }), -/***/ 88090: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 84193: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['dynamodbstreams'] = {}; -AWS.DynamoDBStreams = Service.defineService('dynamodbstreams', ['2012-08-10']); -Object.defineProperty(apiLoader.services['dynamodbstreams'], '2012-08-10', { - get: function get() { - var model = __nccwpck_require__(26098); - model.paginators = (__nccwpck_require__(40549)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.DynamoDBStreams; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveSigV4AuthConfig = exports.resolveAwsAuthConfig = void 0; +const property_provider_1 = __nccwpck_require__(79721); +const signature_v4_1 = __nccwpck_require__(11528); +const util_middleware_1 = __nccwpck_require__(2390); +const CREDENTIAL_EXPIRE_WINDOW = 300000; +const resolveAwsAuthConfig = (input) => { + const normalizedCreds = input.credentials + ? 
normalizeCredentialProvider(input.credentials) + : input.credentialDefaultProvider(input); + const { signingEscapePath = true, systemClockOffset = input.systemClockOffset || 0, sha256 } = input; + let signer; + if (input.signer) { + signer = (0, util_middleware_1.normalizeProvider)(input.signer); + } + else if (input.regionInfoProvider) { + signer = () => (0, util_middleware_1.normalizeProvider)(input.region)() + .then(async (region) => [ + (await input.regionInfoProvider(region, { + useFipsEndpoint: await input.useFipsEndpoint(), + useDualstackEndpoint: await input.useDualstackEndpoint(), + })) || {}, + region, + ]) + .then(([regionInfo, region]) => { + const { signingRegion, signingService } = regionInfo; + input.signingRegion = input.signingRegion || signingRegion || region; + input.signingName = input.signingName || signingService || input.serviceId; + const params = { + ...input, + credentials: normalizedCreds, + region: input.signingRegion, + service: input.signingName, + sha256, + uriEscapePath: signingEscapePath, + }; + const SignerCtor = input.signerConstructor || signature_v4_1.SignatureV4; + return new SignerCtor(params); + }); + } + else { + signer = async (authScheme) => { + authScheme = Object.assign({}, { + name: "sigv4", + signingName: input.signingName || input.defaultSigningName, + signingRegion: await (0, util_middleware_1.normalizeProvider)(input.region)(), + properties: {}, + }, authScheme); + const signingRegion = authScheme.signingRegion; + const signingService = authScheme.signingName; + input.signingRegion = input.signingRegion || signingRegion; + input.signingName = input.signingName || signingService || input.serviceId; + const params = { + ...input, + credentials: normalizedCreds, + region: input.signingRegion, + service: input.signingName, + sha256, + uriEscapePath: signingEscapePath, + }; + const SignerCtor = input.signerConstructor || signature_v4_1.SignatureV4; + return new SignerCtor(params); + }; + } + return { + ...input, + systemClockOffset, + signingEscapePath, + credentials: normalizedCreds, + signer, + }; +}; +exports.resolveAwsAuthConfig = resolveAwsAuthConfig; +const resolveSigV4AuthConfig = (input) => { + const normalizedCreds = input.credentials + ? 
normalizeCredentialProvider(input.credentials) + : input.credentialDefaultProvider(input); + const { signingEscapePath = true, systemClockOffset = input.systemClockOffset || 0, sha256 } = input; + let signer; + if (input.signer) { + signer = (0, util_middleware_1.normalizeProvider)(input.signer); + } + else { + signer = (0, util_middleware_1.normalizeProvider)(new signature_v4_1.SignatureV4({ + credentials: normalizedCreds, + region: input.region, + service: input.signingName, + sha256, + uriEscapePath: signingEscapePath, + })); + } + return { + ...input, + systemClockOffset, + signingEscapePath, + credentials: normalizedCreds, + signer, + }; +}; +exports.resolveSigV4AuthConfig = resolveSigV4AuthConfig; +const normalizeCredentialProvider = (credentials) => { + if (typeof credentials === "function") { + return (0, property_provider_1.memoize)(credentials, (credentials) => credentials.expiration !== undefined && + credentials.expiration.getTime() - Date.now() < CREDENTIAL_EXPIRE_WINDOW, (credentials) => credentials.expiration !== undefined); + } + return (0, util_middleware_1.normalizeProvider)(credentials); +}; /***/ }), -/***/ 62837: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 88053: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['ebs'] = {}; -AWS.EBS = Service.defineService('ebs', ['2019-11-02']); -Object.defineProperty(apiLoader.services['ebs'], '2019-11-02', { - get: function get() { - var model = __nccwpck_require__(72220); - model.paginators = (__nccwpck_require__(85366)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.EBS; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getSigV4AuthPlugin = exports.getAwsAuthPlugin = exports.awsAuthMiddlewareOptions = exports.awsAuthMiddleware = void 0; +const protocol_http_1 = __nccwpck_require__(64418); +const getSkewCorrectedDate_1 = __nccwpck_require__(68253); +const getUpdatedSystemClockOffset_1 = __nccwpck_require__(35863); +const awsAuthMiddleware = (options) => (next, context) => async function (args) { + var _a, _b, _c, _d; + if (!protocol_http_1.HttpRequest.isInstance(args.request)) + return next(args); + const authScheme = (_c = (_b = (_a = context.endpointV2) === null || _a === void 0 ? void 0 : _a.properties) === null || _b === void 0 ? void 0 : _b.authSchemes) === null || _c === void 0 ? void 0 : _c[0]; + const multiRegionOverride = (authScheme === null || authScheme === void 0 ? void 0 : authScheme.name) === "sigv4a" ? (_d = authScheme === null || authScheme === void 0 ? void 0 : authScheme.signingRegionSet) === null || _d === void 0 ? void 0 : _d.join(",") : undefined; + const signer = await options.signer(authScheme); + const output = await next({ + ...args, + request: await signer.sign(args.request, { + signingDate: (0, getSkewCorrectedDate_1.getSkewCorrectedDate)(options.systemClockOffset), + signingRegion: multiRegionOverride || context["signing_region"], + signingService: context["signing_service"], + }), + }).catch((error) => { + var _a; + const serverTime = (_a = error.ServerTime) !== null && _a !== void 0 ? 
_a : getDateHeader(error.$response); + if (serverTime) { + options.systemClockOffset = (0, getUpdatedSystemClockOffset_1.getUpdatedSystemClockOffset)(serverTime, options.systemClockOffset); + } + throw error; + }); + const dateHeader = getDateHeader(output.response); + if (dateHeader) { + options.systemClockOffset = (0, getUpdatedSystemClockOffset_1.getUpdatedSystemClockOffset)(dateHeader, options.systemClockOffset); + } + return output; +}; +exports.awsAuthMiddleware = awsAuthMiddleware; +const getDateHeader = (response) => { var _a, _b, _c; return protocol_http_1.HttpResponse.isInstance(response) ? (_b = (_a = response.headers) === null || _a === void 0 ? void 0 : _a.date) !== null && _b !== void 0 ? _b : (_c = response.headers) === null || _c === void 0 ? void 0 : _c.Date : undefined; }; +exports.awsAuthMiddlewareOptions = { + name: "awsAuthMiddleware", + tags: ["SIGNATURE", "AWSAUTH"], + relation: "after", + toMiddleware: "retryMiddleware", + override: true, +}; +const getAwsAuthPlugin = (options) => ({ + applyToStack: (clientStack) => { + clientStack.addRelativeTo((0, exports.awsAuthMiddleware)(options), exports.awsAuthMiddlewareOptions); + }, +}); +exports.getAwsAuthPlugin = getAwsAuthPlugin; +exports.getSigV4AuthPlugin = exports.getAwsAuthPlugin; /***/ }), -/***/ 7778: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 14935: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['ec2'] = {}; -AWS.EC2 = Service.defineService('ec2', ['2013-06-15*', '2013-10-15*', '2014-02-01*', '2014-05-01*', '2014-06-15*', '2014-09-01*', '2014-10-01*', '2015-03-01*', '2015-04-15*', '2015-10-01*', '2016-04-01*', '2016-09-15*', '2016-11-15']); -__nccwpck_require__(92501); -Object.defineProperty(apiLoader.services['ec2'], '2016-11-15', { - get: function get() { - var model = __nccwpck_require__(2658); - model.paginators = (__nccwpck_require__(82477)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(19153)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.EC2; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(84193), exports); +tslib_1.__exportStar(__nccwpck_require__(88053), exports); /***/ }), -/***/ 92209: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 68253: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['ec2instanceconnect'] = {}; -AWS.EC2InstanceConnect = Service.defineService('ec2instanceconnect', ['2018-04-02']); -Object.defineProperty(apiLoader.services['ec2instanceconnect'], '2018-04-02', { - get: function get() { - var model = __nccwpck_require__(36007); - model.paginators = (__nccwpck_require__(38333)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.EC2InstanceConnect; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getSkewCorrectedDate = void 0; +const getSkewCorrectedDate = (systemClockOffset) => new Date(Date.now() + systemClockOffset); +exports.getSkewCorrectedDate = getSkewCorrectedDate; /***/ }), -/***/ 15211: -/***/ ((module, 
__unused_webpack_exports, __nccwpck_require__) => { +/***/ 35863: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['ecr'] = {}; -AWS.ECR = Service.defineService('ecr', ['2015-09-21']); -Object.defineProperty(apiLoader.services['ecr'], '2015-09-21', { - get: function get() { - var model = __nccwpck_require__(92405); - model.paginators = (__nccwpck_require__(25504)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(78925)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.ECR; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getUpdatedSystemClockOffset = void 0; +const isClockSkewed_1 = __nccwpck_require__(85301); +const getUpdatedSystemClockOffset = (clockTime, currentSystemClockOffset) => { + const clockTimeInMs = Date.parse(clockTime); + if ((0, isClockSkewed_1.isClockSkewed)(clockTimeInMs, currentSystemClockOffset)) { + return clockTimeInMs - Date.now(); + } + return currentSystemClockOffset; +}; +exports.getUpdatedSystemClockOffset = getUpdatedSystemClockOffset; /***/ }), -/***/ 90244: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 85301: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['ecrpublic'] = {}; -AWS.ECRPUBLIC = Service.defineService('ecrpublic', ['2020-10-30']); -Object.defineProperty(apiLoader.services['ecrpublic'], '2020-10-30', { - get: function get() { - var model = __nccwpck_require__(9668); - model.paginators = (__nccwpck_require__(81193)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.ECRPUBLIC; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isClockSkewed = void 0; +const getSkewCorrectedDate_1 = __nccwpck_require__(68253); +const isClockSkewed = (clockTime, systemClockOffset) => Math.abs((0, getSkewCorrectedDate_1.getSkewCorrectedDate)(systemClockOffset).getTime() - clockTime) >= 300000; +exports.isClockSkewed = isClockSkewed; /***/ }), -/***/ 16615: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 36546: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['ecs'] = {}; -AWS.ECS = Service.defineService('ecs', ['2014-11-13']); -Object.defineProperty(apiLoader.services['ecs'], '2014-11-13', { - get: function get() { - var model = __nccwpck_require__(44208); - model.paginators = (__nccwpck_require__(15738)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(1299)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.ECS; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveUserAgentConfig = void 0; +function resolveUserAgentConfig(input) { + return { + ...input, + customUserAgent: typeof input.customUserAgent === "string" ? 
[[input.customUserAgent]] : input.customUserAgent, + }; +} +exports.resolveUserAgentConfig = resolveUserAgentConfig; /***/ }), -/***/ 34375: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 28025: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['efs'] = {}; -AWS.EFS = Service.defineService('efs', ['2015-02-01']); -Object.defineProperty(apiLoader.services['efs'], '2015-02-01', { - get: function get() { - var model = __nccwpck_require__(54784); - model.paginators = (__nccwpck_require__(40174)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.EFS; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.UA_ESCAPE_CHAR = exports.UA_VALUE_ESCAPE_REGEX = exports.UA_NAME_ESCAPE_REGEX = exports.UA_NAME_SEPARATOR = exports.SPACE = exports.X_AMZ_USER_AGENT = exports.USER_AGENT = void 0; +exports.USER_AGENT = "user-agent"; +exports.X_AMZ_USER_AGENT = "x-amz-user-agent"; +exports.SPACE = " "; +exports.UA_NAME_SEPARATOR = "/"; +exports.UA_NAME_ESCAPE_REGEX = /[^\!\$\%\&\'\*\+\-\.\^\_\`\|\~\d\w]/g; +exports.UA_VALUE_ESCAPE_REGEX = /[^\!\$\%\&\'\*\+\-\.\^\_\`\|\~\d\w\#]/g; +exports.UA_ESCAPE_CHAR = "-"; /***/ }), -/***/ 23337: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 64688: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['eks'] = {}; -AWS.EKS = Service.defineService('eks', ['2017-11-01']); -Object.defineProperty(apiLoader.services['eks'], '2017-11-01', { - get: function get() { - var model = __nccwpck_require__(51370); - model.paginators = (__nccwpck_require__(36490)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(88058)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.EKS; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(36546), exports); +tslib_1.__exportStar(__nccwpck_require__(76236), exports); /***/ }), -/***/ 81065: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 76236: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['elasticache'] = {}; -AWS.ElastiCache = Service.defineService('elasticache', ['2012-11-15*', '2014-03-24*', '2014-07-15*', '2014-09-30*', '2015-02-02']); -Object.defineProperty(apiLoader.services['elasticache'], '2015-02-02', { - get: function get() { - var model = __nccwpck_require__(58426); - model.paginators = (__nccwpck_require__(79559)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(29787)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.ElastiCache; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getUserAgentPlugin = exports.getUserAgentMiddlewareOptions = exports.userAgentMiddleware = void 0; +const util_endpoints_1 = __nccwpck_require__(13350); +const protocol_http_1 = __nccwpck_require__(64418); +const constants_1 = 
__nccwpck_require__(28025); +const userAgentMiddleware = (options) => (next, context) => async (args) => { + var _a, _b; + const { request } = args; + if (!protocol_http_1.HttpRequest.isInstance(request)) + return next(args); + const { headers } = request; + const userAgent = ((_a = context === null || context === void 0 ? void 0 : context.userAgent) === null || _a === void 0 ? void 0 : _a.map(escapeUserAgent)) || []; + const defaultUserAgent = (await options.defaultUserAgentProvider()).map(escapeUserAgent); + const customUserAgent = ((_b = options === null || options === void 0 ? void 0 : options.customUserAgent) === null || _b === void 0 ? void 0 : _b.map(escapeUserAgent)) || []; + const prefix = (0, util_endpoints_1.getUserAgentPrefix)(); + const sdkUserAgentValue = (prefix ? [prefix] : []) + .concat([...defaultUserAgent, ...userAgent, ...customUserAgent]) + .join(constants_1.SPACE); + const normalUAValue = [ + ...defaultUserAgent.filter((section) => section.startsWith("aws-sdk-")), + ...customUserAgent, + ].join(constants_1.SPACE); + if (options.runtime !== "browser") { + if (normalUAValue) { + headers[constants_1.X_AMZ_USER_AGENT] = headers[constants_1.X_AMZ_USER_AGENT] + ? `${headers[constants_1.USER_AGENT]} ${normalUAValue}` + : normalUAValue; + } + headers[constants_1.USER_AGENT] = sdkUserAgentValue; + } + else { + headers[constants_1.X_AMZ_USER_AGENT] = sdkUserAgentValue; + } + return next({ + ...args, + request, + }); +}; +exports.userAgentMiddleware = userAgentMiddleware; +const escapeUserAgent = (userAgentPair) => { + var _a; + const name = userAgentPair[0] + .split(constants_1.UA_NAME_SEPARATOR) + .map((part) => part.replace(constants_1.UA_NAME_ESCAPE_REGEX, constants_1.UA_ESCAPE_CHAR)) + .join(constants_1.UA_NAME_SEPARATOR); + const version = (_a = userAgentPair[1]) === null || _a === void 0 ? 
void 0 : _a.replace(constants_1.UA_VALUE_ESCAPE_REGEX, constants_1.UA_ESCAPE_CHAR); + const prefixSeparatorIndex = name.indexOf(constants_1.UA_NAME_SEPARATOR); + const prefix = name.substring(0, prefixSeparatorIndex); + let uaName = name.substring(prefixSeparatorIndex + 1); + if (prefix === "api") { + uaName = uaName.toLowerCase(); + } + return [prefix, uaName, version] + .filter((item) => item && item.length > 0) + .reduce((acc, item, index) => { + switch (index) { + case 0: + return item; + case 1: + return `${acc}/${item}`; + default: + return `${acc}#${item}`; + } + }, ""); +}; +exports.getUserAgentMiddlewareOptions = { + name: "getUserAgentMiddleware", + step: "build", + priority: "low", + tags: ["SET_USER_AGENT", "USER_AGENT"], + override: true, +}; +const getUserAgentPlugin = (config) => ({ + applyToStack: (clientStack) => { + clientStack.add((0, exports.userAgentMiddleware)(config), exports.getUserAgentMiddlewareOptions); + }, +}); +exports.getUserAgentPlugin = getUserAgentPlugin; /***/ }), -/***/ 14897: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 68805: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['elasticbeanstalk'] = {}; -AWS.ElasticBeanstalk = Service.defineService('elasticbeanstalk', ['2010-12-01']); -Object.defineProperty(apiLoader.services['elasticbeanstalk'], '2010-12-01', { - get: function get() { - var model = __nccwpck_require__(72508); - model.paginators = (__nccwpck_require__(72305)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(62534)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.ElasticBeanstalk; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(74544), exports); /***/ }), -/***/ 37708: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 18711: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['elasticinference'] = {}; -AWS.ElasticInference = Service.defineService('elasticinference', ['2017-07-25']); -Object.defineProperty(apiLoader.services['elasticinference'], '2017-07-25', { - get: function get() { - var model = __nccwpck_require__(83967); - model.paginators = (__nccwpck_require__(64906)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.ElasticInference; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.NODEJS_TIMEOUT_ERROR_CODES = void 0; +exports.NODEJS_TIMEOUT_ERROR_CODES = ["ECONNRESET", "EPIPE", "ETIMEDOUT"]; /***/ }), -/***/ 40745: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 42420: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['elastictranscoder'] = {}; -AWS.ElasticTranscoder = Service.defineService('elastictranscoder', ['2012-09-25']); -Object.defineProperty(apiLoader.services['elastictranscoder'], '2012-09-25', { - get: function get() { - var model = __nccwpck_require__(23463); - model.paginators = 
(__nccwpck_require__(36121)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(59345)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.ElasticTranscoder; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getTransformedHeaders = void 0; +const getTransformedHeaders = (headers) => { + const transformedHeaders = {}; + for (const name of Object.keys(headers)) { + const headerValues = headers[name]; + transformedHeaders[name] = Array.isArray(headerValues) ? headerValues.join(",") : headerValues; + } + return transformedHeaders; +}; +exports.getTransformedHeaders = getTransformedHeaders; /***/ }), -/***/ 10907: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 74544: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['elb'] = {}; -AWS.ELB = Service.defineService('elb', ['2012-06-01']); -Object.defineProperty(apiLoader.services['elb'], '2012-06-01', { - get: function get() { - var model = __nccwpck_require__(66258); - model.paginators = (__nccwpck_require__(77372)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(56717)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.ELB; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(71901), exports); +tslib_1.__exportStar(__nccwpck_require__(28320), exports); +tslib_1.__exportStar(__nccwpck_require__(93315), exports); /***/ }), -/***/ 44311: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 71901: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['elbv2'] = {}; -AWS.ELBv2 = Service.defineService('elbv2', ['2015-12-01']); -Object.defineProperty(apiLoader.services['elbv2'], '2015-12-01', { - get: function get() { - var model = __nccwpck_require__(42628); - model.paginators = (__nccwpck_require__(12274)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(56106)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.ELBv2; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.NodeHttpHandler = exports.DEFAULT_REQUEST_TIMEOUT = void 0; +const protocol_http_1 = __nccwpck_require__(15067); +const querystring_builder_1 = __nccwpck_require__(22220); +const http_1 = __nccwpck_require__(13685); +const https_1 = __nccwpck_require__(95687); +const constants_1 = __nccwpck_require__(18711); +const get_transformed_headers_1 = __nccwpck_require__(42420); +const set_connection_timeout_1 = __nccwpck_require__(28077); +const set_socket_keep_alive_1 = __nccwpck_require__(86507); +const set_socket_timeout_1 = __nccwpck_require__(98298); +const write_request_body_1 = __nccwpck_require__(48568); +exports.DEFAULT_REQUEST_TIMEOUT = 0; +class NodeHttpHandler { + constructor(options) { + this.metadata = { handlerProtocol: "http/1.1" }; + this.configProvider = new Promise((resolve, reject) => { + if (typeof options === "function") { + options() + .then((_options) => { + resolve(this.resolveDefaultConfig(_options)); + 
}) + .catch(reject); + } + else { + resolve(this.resolveDefaultConfig(options)); + } + }); + } + resolveDefaultConfig(options) { + const { requestTimeout, connectionTimeout, socketTimeout, httpAgent, httpsAgent } = options || {}; + const keepAlive = true; + const maxSockets = 50; + return { + connectionTimeout, + requestTimeout: requestTimeout !== null && requestTimeout !== void 0 ? requestTimeout : socketTimeout, + httpAgent: httpAgent || new http_1.Agent({ keepAlive, maxSockets }), + httpsAgent: httpsAgent || new https_1.Agent({ keepAlive, maxSockets }), + }; + } + destroy() { + var _a, _b, _c, _d; + (_b = (_a = this.config) === null || _a === void 0 ? void 0 : _a.httpAgent) === null || _b === void 0 ? void 0 : _b.destroy(); + (_d = (_c = this.config) === null || _c === void 0 ? void 0 : _c.httpsAgent) === null || _d === void 0 ? void 0 : _d.destroy(); + } + async handle(request, { abortSignal } = {}) { + if (!this.config) { + this.config = await this.configProvider; + } + return new Promise((_resolve, _reject) => { + var _a, _b; + let writeRequestBodyPromise = undefined; + const resolve = async (arg) => { + await writeRequestBodyPromise; + _resolve(arg); + }; + const reject = async (arg) => { + await writeRequestBodyPromise; + _reject(arg); + }; + if (!this.config) { + throw new Error("Node HTTP request handler config is not resolved"); + } + if (abortSignal === null || abortSignal === void 0 ? void 0 : abortSignal.aborted) { + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + reject(abortError); + return; + } + const isSSL = request.protocol === "https:"; + const queryString = (0, querystring_builder_1.buildQueryString)(request.query || {}); + let auth = undefined; + if (request.username != null || request.password != null) { + const username = (_a = request.username) !== null && _a !== void 0 ? _a : ""; + const password = (_b = request.password) !== null && _b !== void 0 ? _b : ""; + auth = `${username}:${password}`; + } + let path = request.path; + if (queryString) { + path += `?${queryString}`; + } + if (request.fragment) { + path += `#${request.fragment}`; + } + const nodeHttpsOptions = { + headers: request.headers, + host: request.hostname, + method: request.method, + path, + port: request.port, + agent: isSSL ? this.config.httpsAgent : this.config.httpAgent, + auth, + }; + const requestFunc = isSSL ? 
https_1.request : http_1.request; + const req = requestFunc(nodeHttpsOptions, (res) => { + const httpResponse = new protocol_http_1.HttpResponse({ + statusCode: res.statusCode || -1, + reason: res.statusMessage, + headers: (0, get_transformed_headers_1.getTransformedHeaders)(res.headers), + body: res, + }); + resolve({ response: httpResponse }); + }); + req.on("error", (err) => { + if (constants_1.NODEJS_TIMEOUT_ERROR_CODES.includes(err.code)) { + reject(Object.assign(err, { name: "TimeoutError" })); + } + else { + reject(err); + } + }); + (0, set_connection_timeout_1.setConnectionTimeout)(req, reject, this.config.connectionTimeout); + (0, set_socket_timeout_1.setSocketTimeout)(req, reject, this.config.requestTimeout); + if (abortSignal) { + abortSignal.onabort = () => { + req.abort(); + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + reject(abortError); + }; + } + const httpAgent = nodeHttpsOptions.agent; + if (typeof httpAgent === "object" && "keepAlive" in httpAgent) { + (0, set_socket_keep_alive_1.setSocketKeepAlive)(req, { + keepAlive: httpAgent.keepAlive, + keepAliveMsecs: httpAgent.keepAliveMsecs, + }); + } + writeRequestBodyPromise = (0, write_request_body_1.writeRequestBody)(req, request, this.config.requestTimeout).catch(_reject); + }); + } +} +exports.NodeHttpHandler = NodeHttpHandler; /***/ }), -/***/ 50470: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 59112: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['emr'] = {}; -AWS.EMR = Service.defineService('emr', ['2009-03-31']); -Object.defineProperty(apiLoader.services['emr'], '2009-03-31', { - get: function get() { - var model = __nccwpck_require__(91298); - model.paginators = (__nccwpck_require__(62965)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(86792)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.EMR; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.NodeHttp2ConnectionManager = void 0; +const tslib_1 = __nccwpck_require__(4351); +const http2_1 = tslib_1.__importDefault(__nccwpck_require__(85158)); +const node_http2_connection_pool_1 = __nccwpck_require__(37090); +class NodeHttp2ConnectionManager { + constructor(config) { + this.sessionCache = new Map(); + this.config = config; + if (this.config.maxConcurrency && this.config.maxConcurrency <= 0) { + throw new RangeError("maxConcurrency must be greater than zero."); + } + } + lease(requestContext, connectionConfiguration) { + const url = this.getUrlString(requestContext); + const existingPool = this.sessionCache.get(url); + if (existingPool) { + const existingSession = existingPool.poll(); + if (existingSession && !this.config.disableConcurrency) { + return existingSession; + } + } + const session = http2_1.default.connect(url); + if (this.config.maxConcurrency) { + session.settings({ maxConcurrentStreams: this.config.maxConcurrency }, (err) => { + if (err) { + throw new Error("Fail to set maxConcurrentStreams to " + + this.config.maxConcurrency + + "when creating new session for " + + requestContext.destination.toString()); + } + }); + } + session.unref(); + const destroySessionCb = () => { + session.destroy(); + this.deleteSession(url, session); + }; + session.on("goaway", destroySessionCb); + session.on("error", 
destroySessionCb); + session.on("frameError", destroySessionCb); + session.on("close", () => this.deleteSession(url, session)); + if (connectionConfiguration.requestTimeout) { + session.setTimeout(connectionConfiguration.requestTimeout, destroySessionCb); + } + const connectionPool = this.sessionCache.get(url) || new node_http2_connection_pool_1.NodeHttp2ConnectionPool(); + connectionPool.offerLast(session); + this.sessionCache.set(url, connectionPool); + return session; + } + deleteSession(authority, session) { + const existingConnectionPool = this.sessionCache.get(authority); + if (!existingConnectionPool) { + return; + } + if (!existingConnectionPool.contains(session)) { + return; + } + existingConnectionPool.remove(session); + this.sessionCache.set(authority, existingConnectionPool); + } + release(requestContext, session) { + var _a; + const cacheKey = this.getUrlString(requestContext); + (_a = this.sessionCache.get(cacheKey)) === null || _a === void 0 ? void 0 : _a.offerLast(session); + } + destroy() { + for (const [key, connectionPool] of this.sessionCache) { + for (const session of connectionPool) { + if (!session.destroyed) { + session.destroy(); + } + connectionPool.remove(session); + } + this.sessionCache.delete(key); + } + } + setMaxConcurrentStreams(maxConcurrentStreams) { + if (this.config.maxConcurrency && this.config.maxConcurrency <= 0) { + throw new RangeError("maxConcurrentStreams must be greater than zero."); + } + this.config.maxConcurrency = maxConcurrentStreams; + } + setDisableConcurrentStreams(disableConcurrentStreams) { + this.config.disableConcurrency = disableConcurrentStreams; + } + getUrlString(request) { + return request.destination.toString(); + } +} +exports.NodeHttp2ConnectionManager = NodeHttp2ConnectionManager; /***/ }), -/***/ 49984: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 37090: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['emrcontainers'] = {}; -AWS.EMRcontainers = Service.defineService('emrcontainers', ['2020-10-01']); -Object.defineProperty(apiLoader.services['emrcontainers'], '2020-10-01', { - get: function get() { - var model = __nccwpck_require__(33922); - model.paginators = (__nccwpck_require__(87789)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.EMRcontainers; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.NodeHttp2ConnectionPool = void 0; +class NodeHttp2ConnectionPool { + constructor(sessions) { + this.sessions = []; + this.sessions = sessions !== null && sessions !== void 0 ? 
sessions : []; + } + poll() { + if (this.sessions.length > 0) { + return this.sessions.shift(); + } + } + offerLast(session) { + this.sessions.push(session); + } + contains(session) { + return this.sessions.includes(session); + } + remove(session) { + this.sessions = this.sessions.filter((s) => s !== session); + } + [Symbol.iterator]() { + return this.sessions[Symbol.iterator](); + } + destroy(connection) { + for (const session of this.sessions) { + if (session === connection) { + if (!session.destroyed) { + session.destroy(); + } + } + } + } +} +exports.NodeHttp2ConnectionPool = NodeHttp2ConnectionPool; /***/ }), -/***/ 219: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 28320: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['emrserverless'] = {}; -AWS.EMRServerless = Service.defineService('emrserverless', ['2021-07-13']); -Object.defineProperty(apiLoader.services['emrserverless'], '2021-07-13', { - get: function get() { - var model = __nccwpck_require__(41070); - model.paginators = (__nccwpck_require__(39521)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.EMRServerless; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.NodeHttp2Handler = void 0; +const protocol_http_1 = __nccwpck_require__(15067); +const querystring_builder_1 = __nccwpck_require__(22220); +const http2_1 = __nccwpck_require__(85158); +const get_transformed_headers_1 = __nccwpck_require__(42420); +const node_http2_connection_manager_1 = __nccwpck_require__(59112); +const write_request_body_1 = __nccwpck_require__(48568); +class NodeHttp2Handler { + constructor(options) { + this.metadata = { handlerProtocol: "h2" }; + this.connectionManager = new node_http2_connection_manager_1.NodeHttp2ConnectionManager({}); + this.configProvider = new Promise((resolve, reject) => { + if (typeof options === "function") { + options() + .then((opts) => { + resolve(opts || {}); + }) + .catch(reject); + } + else { + resolve(options || {}); + } + }); + } + destroy() { + this.connectionManager.destroy(); + } + async handle(request, { abortSignal } = {}) { + if (!this.config) { + this.config = await this.configProvider; + this.connectionManager.setDisableConcurrentStreams(this.config.disableConcurrentStreams || false); + if (this.config.maxConcurrentStreams) { + this.connectionManager.setMaxConcurrentStreams(this.config.maxConcurrentStreams); + } + } + const { requestTimeout, disableConcurrentStreams } = this.config; + return new Promise((_resolve, _reject) => { + var _a, _b, _c; + let fulfilled = false; + let writeRequestBodyPromise = undefined; + const resolve = async (arg) => { + await writeRequestBodyPromise; + _resolve(arg); + }; + const reject = async (arg) => { + await writeRequestBodyPromise; + _reject(arg); + }; + if (abortSignal === null || abortSignal === void 0 ? void 0 : abortSignal.aborted) { + fulfilled = true; + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + reject(abortError); + return; + } + const { hostname, method, port, protocol, query } = request; + let auth = ""; + if (request.username != null || request.password != null) { + const username = (_a = request.username) !== null && _a !== void 0 ? _a : ""; + const password = (_b = request.password) !== null && _b !== void 0 ? 
_b : ""; + auth = `${username}:${password}@`; + } + const authority = `${protocol}//${auth}${hostname}${port ? `:${port}` : ""}`; + const requestContext = { destination: new URL(authority) }; + const session = this.connectionManager.lease(requestContext, { + requestTimeout: (_c = this.config) === null || _c === void 0 ? void 0 : _c.sessionTimeout, + disableConcurrentStreams: disableConcurrentStreams || false, + }); + const rejectWithDestroy = (err) => { + if (disableConcurrentStreams) { + this.destroySession(session); + } + fulfilled = true; + reject(err); + }; + const queryString = (0, querystring_builder_1.buildQueryString)(query || {}); + let path = request.path; + if (queryString) { + path += `?${queryString}`; + } + if (request.fragment) { + path += `#${request.fragment}`; + } + const req = session.request({ + ...request.headers, + [http2_1.constants.HTTP2_HEADER_PATH]: path, + [http2_1.constants.HTTP2_HEADER_METHOD]: method, + }); + session.ref(); + req.on("response", (headers) => { + const httpResponse = new protocol_http_1.HttpResponse({ + statusCode: headers[":status"] || -1, + headers: (0, get_transformed_headers_1.getTransformedHeaders)(headers), + body: req, + }); + fulfilled = true; + resolve({ response: httpResponse }); + if (disableConcurrentStreams) { + session.close(); + this.connectionManager.deleteSession(authority, session); + } + }); + if (requestTimeout) { + req.setTimeout(requestTimeout, () => { + req.close(); + const timeoutError = new Error(`Stream timed out because of no activity for ${requestTimeout} ms`); + timeoutError.name = "TimeoutError"; + rejectWithDestroy(timeoutError); + }); + } + if (abortSignal) { + abortSignal.onabort = () => { + req.close(); + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + rejectWithDestroy(abortError); + }; + } + req.on("frameError", (type, code, id) => { + rejectWithDestroy(new Error(`Frame type id ${type} in stream id ${id} has failed with code ${code}.`)); + }); + req.on("error", rejectWithDestroy); + req.on("aborted", () => { + rejectWithDestroy(new Error(`HTTP/2 stream is abnormally aborted in mid-communication with result code ${req.rstCode}.`)); + }); + req.on("close", () => { + session.unref(); + if (disableConcurrentStreams) { + session.destroy(); + } + if (!fulfilled) { + rejectWithDestroy(new Error("Unexpected error: http2 request did not get a response")); + } + }); + writeRequestBodyPromise = (0, write_request_body_1.writeRequestBody)(req, request, requestTimeout); + }); + } + destroySession(session) { + if (!session.destroyed) { + session.destroy(); + } + } +} +exports.NodeHttp2Handler = NodeHttp2Handler; /***/ }), -/***/ 84462: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 28077: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['es'] = {}; -AWS.ES = Service.defineService('es', ['2015-01-01']); -Object.defineProperty(apiLoader.services['es'], '2015-01-01', { - get: function get() { - var model = __nccwpck_require__(33943); - model.paginators = (__nccwpck_require__(78836)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.ES; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.setConnectionTimeout = void 0; +const setConnectionTimeout = (request, reject, timeoutInMs = 0) => { + if (!timeoutInMs) { + return; + } 
+ const timeoutId = setTimeout(() => { + request.destroy(); + reject(Object.assign(new Error(`Socket timed out without establishing a connection within ${timeoutInMs} ms`), { + name: "TimeoutError", + })); + }, timeoutInMs); + request.on("socket", (socket) => { + if (socket.connecting) { + socket.on("connect", () => { + clearTimeout(timeoutId); + }); + } + else { + clearTimeout(timeoutId); + } + }); +}; +exports.setConnectionTimeout = setConnectionTimeout; /***/ }), -/***/ 898: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 86507: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['eventbridge'] = {}; -AWS.EventBridge = Service.defineService('eventbridge', ['2015-10-07']); -__nccwpck_require__(3034); -Object.defineProperty(apiLoader.services['eventbridge'], '2015-10-07', { - get: function get() { - var model = __nccwpck_require__(9659); - model.paginators = (__nccwpck_require__(10871)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.EventBridge; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.setSocketKeepAlive = void 0; +const setSocketKeepAlive = (request, { keepAlive, keepAliveMsecs }) => { + if (keepAlive !== true) { + return; + } + request.on("socket", (socket) => { + socket.setKeepAlive(keepAlive, keepAliveMsecs || 0); + }); +}; +exports.setSocketKeepAlive = setSocketKeepAlive; /***/ }), -/***/ 21440: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 98298: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['evidently'] = {}; -AWS.Evidently = Service.defineService('evidently', ['2021-02-01']); -Object.defineProperty(apiLoader.services['evidently'], '2021-02-01', { - get: function get() { - var model = __nccwpck_require__(41971); - model.paginators = (__nccwpck_require__(72960)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Evidently; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.setSocketTimeout = void 0; +const setSocketTimeout = (request, reject, timeoutInMs = 0) => { + request.setTimeout(timeoutInMs, () => { + request.destroy(); + reject(Object.assign(new Error(`Connection timed out after ${timeoutInMs} ms`), { name: "TimeoutError" })); + }); +}; +exports.setSocketTimeout = setSocketTimeout; /***/ }), -/***/ 3052: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 25874: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['finspace'] = {}; -AWS.Finspace = Service.defineService('finspace', ['2021-03-12']); -Object.defineProperty(apiLoader.services['finspace'], '2021-03-12', { - get: function get() { - var model = __nccwpck_require__(37836); - model.paginators = (__nccwpck_require__(7328)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Finspace; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Collector = void 0; +const stream_1 = 
__nccwpck_require__(12781); +class Collector extends stream_1.Writable { + constructor() { + super(...arguments); + this.bufferedBytes = []; + } + _write(chunk, encoding, callback) { + this.bufferedBytes.push(chunk); + callback(); + } +} +exports.Collector = Collector; /***/ }), -/***/ 96869: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 93315: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['finspacedata'] = {}; -AWS.Finspacedata = Service.defineService('finspacedata', ['2020-07-13']); -Object.defineProperty(apiLoader.services['finspacedata'], '2020-07-13', { - get: function get() { - var model = __nccwpck_require__(83394); - model.paginators = (__nccwpck_require__(70371)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Finspacedata; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.streamCollector = void 0; +const collector_1 = __nccwpck_require__(25874); +const streamCollector = (stream) => new Promise((resolve, reject) => { + const collector = new collector_1.Collector(); + stream.pipe(collector); + stream.on("error", (err) => { + collector.end(); + reject(err); + }); + collector.on("error", reject); + collector.on("finish", function () { + const bytes = new Uint8Array(Buffer.concat(this.bufferedBytes)); + resolve(bytes); + }); +}); +exports.streamCollector = streamCollector; /***/ }), -/***/ 92831: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 48568: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['firehose'] = {}; -AWS.Firehose = Service.defineService('firehose', ['2015-08-04']); -Object.defineProperty(apiLoader.services['firehose'], '2015-08-04', { - get: function get() { - var model = __nccwpck_require__(48886); - model.paginators = (__nccwpck_require__(47400)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Firehose; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.writeRequestBody = void 0; +const stream_1 = __nccwpck_require__(12781); +const MIN_WAIT_TIME = 1000; +async function writeRequestBody(httpRequest, request, maxContinueTimeoutMs = MIN_WAIT_TIME) { + var _a; + const headers = (_a = request.headers) !== null && _a !== void 0 ? 
_a : {}; + const expect = headers["Expect"] || headers["expect"]; + let timeoutId = -1; + let hasError = false; + if (expect === "100-continue") { + await Promise.race([ + new Promise((resolve) => { + timeoutId = Number(setTimeout(resolve, Math.max(MIN_WAIT_TIME, maxContinueTimeoutMs))); + }), + new Promise((resolve) => { + httpRequest.on("continue", () => { + clearTimeout(timeoutId); + resolve(); + }); + httpRequest.on("error", () => { + hasError = true; + clearTimeout(timeoutId); + resolve(); + }); + }), + ]); + } + if (!hasError) { + writeBody(httpRequest, request.body); + } +} +exports.writeRequestBody = writeRequestBody; +function writeBody(httpRequest, body) { + if (body instanceof stream_1.Readable) { + body.pipe(httpRequest); + } + else if (body) { + httpRequest.end(Buffer.from(body)); + } + else { + httpRequest.end(); + } +} /***/ }), -/***/ 73003: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 4630: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['fis'] = {}; -AWS.Fis = Service.defineService('fis', ['2020-12-01']); -Object.defineProperty(apiLoader.services['fis'], '2020-12-01', { - get: function get() { - var model = __nccwpck_require__(98356); - model.paginators = (__nccwpck_require__(6544)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Fis; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Field = void 0; +const types_1 = __nccwpck_require__(4338); +class Field { + constructor({ name, kind = types_1.FieldPosition.HEADER, values = [] }) { + this.name = name; + this.kind = kind; + this.values = values; + } + add(value) { + this.values.push(value); + } + set(values) { + this.values = values; + } + remove(value) { + this.values = this.values.filter((v) => v !== value); + } + toString() { + return this.values.map((v) => (v.includes(",") || v.includes(" ") ? 
`"${v}"` : v)).join(", "); + } + get() { + return this.values; + } +} +exports.Field = Field; /***/ }), -/***/ 11316: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 22675: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['fms'] = {}; -AWS.FMS = Service.defineService('fms', ['2018-01-01']); -Object.defineProperty(apiLoader.services['fms'], '2018-01-01', { - get: function get() { - var model = __nccwpck_require__(22212); - model.paginators = (__nccwpck_require__(49570)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.FMS; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Fields = void 0; +class Fields { + constructor({ fields = [], encoding = "utf-8" }) { + this.entries = {}; + fields.forEach(this.setField.bind(this)); + this.encoding = encoding; + } + setField(field) { + this.entries[field.name.toLowerCase()] = field; + } + getField(name) { + return this.entries[name.toLowerCase()]; + } + removeField(name) { + delete this.entries[name.toLowerCase()]; + } + getByType(kind) { + return Object.values(this.entries).filter((field) => field.kind === kind); + } +} +exports.Fields = Fields; /***/ }), -/***/ 36822: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 53882: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['forecastqueryservice'] = {}; -AWS.ForecastQueryService = Service.defineService('forecastqueryservice', ['2018-06-26']); -Object.defineProperty(apiLoader.services['forecastqueryservice'], '2018-06-26', { - get: function get() { - var model = __nccwpck_require__(23865); - model.paginators = (__nccwpck_require__(98135)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.ForecastQueryService; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 12942: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 21940: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['forecastservice'] = {}; -AWS.ForecastService = Service.defineService('forecastservice', ['2018-06-26']); -Object.defineProperty(apiLoader.services['forecastservice'], '2018-06-26', { - get: function get() { - var model = __nccwpck_require__(6468); - model.paginators = (__nccwpck_require__(45338)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.ForecastService; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.HttpRequest = void 0; +class HttpRequest { + constructor(options) { + this.method = options.method || "GET"; + this.hostname = options.hostname || "localhost"; + this.port = options.port; + this.query = options.query || {}; + this.headers = options.headers || {}; + this.body = options.body; + this.protocol = options.protocol + ? options.protocol.slice(-1) !== ":" + ? `${options.protocol}:` + : options.protocol + : "https:"; + this.path = options.path ? (options.path.charAt(0) !== "/" ? 
`/${options.path}` : options.path) : "/"; + this.username = options.username; + this.password = options.password; + this.fragment = options.fragment; + } + static isInstance(request) { + if (!request) + return false; + const req = request; + return ("method" in req && + "protocol" in req && + "hostname" in req && + "path" in req && + typeof req["query"] === "object" && + typeof req["headers"] === "object"); + } + clone() { + const cloned = new HttpRequest({ + ...this, + headers: { ...this.headers }, + }); + if (cloned.query) + cloned.query = cloneQuery(cloned.query); + return cloned; + } +} +exports.HttpRequest = HttpRequest; +function cloneQuery(query) { + return Object.keys(query).reduce((carry, paramName) => { + const param = query[paramName]; + return { + ...carry, + [paramName]: Array.isArray(param) ? [...param] : param, + }; + }, {}); +} /***/ }), -/***/ 99830: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 78993: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['frauddetector'] = {}; -AWS.FraudDetector = Service.defineService('frauddetector', ['2019-11-15']); -Object.defineProperty(apiLoader.services['frauddetector'], '2019-11-15', { - get: function get() { - var model = __nccwpck_require__(96105); - model.paginators = (__nccwpck_require__(9177)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.FraudDetector; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.HttpResponse = void 0; +class HttpResponse { + constructor(options) { + this.statusCode = options.statusCode; + this.reason = options.reason; + this.headers = options.headers || {}; + this.body = options.body; + } + static isInstance(response) { + if (!response) + return false; + const resp = response; + return typeof resp.statusCode === "number" && typeof resp.headers === "object"; + } +} +exports.HttpResponse = HttpResponse; /***/ }), -/***/ 60642: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 15067: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['fsx'] = {}; -AWS.FSx = Service.defineService('fsx', ['2018-03-01']); -Object.defineProperty(apiLoader.services['fsx'], '2018-03-01', { - get: function get() { - var model = __nccwpck_require__(58245); - model.paginators = (__nccwpck_require__(19882)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.FSx; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(4630), exports); +tslib_1.__exportStar(__nccwpck_require__(22675), exports); +tslib_1.__exportStar(__nccwpck_require__(53882), exports); +tslib_1.__exportStar(__nccwpck_require__(21940), exports); +tslib_1.__exportStar(__nccwpck_require__(78993), exports); +tslib_1.__exportStar(__nccwpck_require__(7241), exports); +tslib_1.__exportStar(__nccwpck_require__(21146), exports); /***/ }), -/***/ 8085: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 7241: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); 
-var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['gamelift'] = {}; -AWS.GameLift = Service.defineService('gamelift', ['2015-10-01']); -Object.defineProperty(apiLoader.services['gamelift'], '2015-10-01', { - get: function get() { - var model = __nccwpck_require__(69257); - model.paginators = (__nccwpck_require__(88381)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.GameLift; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isValidHostname = void 0; +function isValidHostname(hostname) { + const hostPattern = /^[a-z0-9][a-z0-9\.\-]*[a-z0-9]$/; + return hostPattern.test(hostname); +} +exports.isValidHostname = isValidHostname; /***/ }), -/***/ 83025: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 21146: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['gamesparks'] = {}; -AWS.GameSparks = Service.defineService('gamesparks', ['2021-08-17']); -Object.defineProperty(apiLoader.services['gamesparks'], '2021-08-17', { - get: function get() { - var model = __nccwpck_require__(54092); - model.paginators = (__nccwpck_require__(51734)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.GameSparks; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 63249: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 22220: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['glacier'] = {}; -AWS.Glacier = Service.defineService('glacier', ['2012-06-01']); -__nccwpck_require__(14472); -Object.defineProperty(apiLoader.services['glacier'], '2012-06-01', { - get: function get() { - var model = __nccwpck_require__(11545); - model.paginators = (__nccwpck_require__(54145)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(65182)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Glacier; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.buildQueryString = void 0; +const util_uri_escape_1 = __nccwpck_require__(9014); +function buildQueryString(query) { + const parts = []; + for (let key of Object.keys(query).sort()) { + const value = query[key]; + key = (0, util_uri_escape_1.escapeUri)(key); + if (Array.isArray(value)) { + for (let i = 0, iLen = value.length; i < iLen; i++) { + parts.push(`${key}=${(0, util_uri_escape_1.escapeUri)(value[i])}`); + } + } + else { + let qsEntry = key; + if (value || typeof value === "string") { + qsEntry += `=${(0, util_uri_escape_1.escapeUri)(value)}`; + } + parts.push(qsEntry); + } + } + return parts.join("&"); +} +exports.buildQueryString = buildQueryString; /***/ }), -/***/ 19306: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 89218: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['globalaccelerator'] = {}; -AWS.GlobalAccelerator = Service.defineService('globalaccelerator', ['2018-08-08']); 
-Object.defineProperty(apiLoader.services['globalaccelerator'], '2018-08-08', { - get: function get() { - var model = __nccwpck_require__(35365); - model.paginators = (__nccwpck_require__(14796)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.GlobalAccelerator; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 31658: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 47641: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['glue'] = {}; -AWS.Glue = Service.defineService('glue', ['2017-03-31']); -Object.defineProperty(apiLoader.services['glue'], '2017-03-31', { - get: function get() { - var model = __nccwpck_require__(72268); - model.paginators = (__nccwpck_require__(26545)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Glue; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.HttpAuthLocation = void 0; +var HttpAuthLocation; +(function (HttpAuthLocation) { + HttpAuthLocation["HEADER"] = "header"; + HttpAuthLocation["QUERY"] = "query"; +})(HttpAuthLocation = exports.HttpAuthLocation || (exports.HttpAuthLocation = {})); /***/ }), -/***/ 51050: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 39548: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['grafana'] = {}; -AWS.Grafana = Service.defineService('grafana', ['2020-08-18']); -Object.defineProperty(apiLoader.services['grafana'], '2020-08-18', { - get: function get() { - var model = __nccwpck_require__(29655); - model.paginators = (__nccwpck_require__(83188)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Grafana; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 20690: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 31001: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['greengrass'] = {}; -AWS.Greengrass = Service.defineService('greengrass', ['2017-06-07']); -Object.defineProperty(apiLoader.services['greengrass'], '2017-06-07', { - get: function get() { - var model = __nccwpck_require__(72575); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Greengrass; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 45126: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 39262: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['greengrassv2'] = {}; -AWS.GreengrassV2 = Service.defineService('greengrassv2', ['2020-11-30']); -Object.defineProperty(apiLoader.services['greengrassv2'], '2020-11-30', { - get: function get() { - var model = __nccwpck_require__(57546); - model.paginators = (__nccwpck_require__(47961)/* .pagination */ .o); - return model; - }, - 
enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.GreengrassV2; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 80494: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 32820: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['groundstation'] = {}; -AWS.GroundStation = Service.defineService('groundstation', ['2019-05-23']); -Object.defineProperty(apiLoader.services['groundstation'], '2019-05-23', { - get: function get() { - var model = __nccwpck_require__(27733); - model.paginators = (__nccwpck_require__(55974)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(77815)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.GroundStation; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 40755: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 45777: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['guardduty'] = {}; -AWS.GuardDuty = Service.defineService('guardduty', ['2017-11-28']); -Object.defineProperty(apiLoader.services['guardduty'], '2017-11-28', { - get: function get() { - var model = __nccwpck_require__(37793); - model.paginators = (__nccwpck_require__(87510)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.GuardDuty; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 21834: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 2765: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['health'] = {}; -AWS.Health = Service.defineService('health', ['2016-08-04']); -Object.defineProperty(apiLoader.services['health'], '2016-08-04', { - get: function get() { - var model = __nccwpck_require__(8618); - model.paginators = (__nccwpck_require__(46725)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Health; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(45777), exports); +tslib_1.__exportStar(__nccwpck_require__(20595), exports); +tslib_1.__exportStar(__nccwpck_require__(80159), exports); /***/ }), -/***/ 64254: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 20595: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['healthlake'] = {}; -AWS.HealthLake = Service.defineService('healthlake', ['2017-07-01']); -Object.defineProperty(apiLoader.services['healthlake'], '2017-07-01', { - get: function get() { - var model = __nccwpck_require__(13637); - model.paginators = (__nccwpck_require__(92834)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.HealthLake; 
+Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 38889: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 80159: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['honeycode'] = {}; -AWS.Honeycode = Service.defineService('honeycode', ['2020-03-01']); -Object.defineProperty(apiLoader.services['honeycode'], '2020-03-01', { - get: function get() { - var model = __nccwpck_require__(27577); - model.paginators = (__nccwpck_require__(12243)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Honeycode; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 50058: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 9076: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['iam'] = {}; -AWS.IAM = Service.defineService('iam', ['2010-05-08']); -Object.defineProperty(apiLoader.services['iam'], '2010-05-08', { - get: function get() { - var model = __nccwpck_require__(27041); - model.paginators = (__nccwpck_require__(97583)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(37757)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.IAM; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 60222: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 70686: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['identitystore'] = {}; -AWS.IdentityStore = Service.defineService('identitystore', ['2020-06-15']); -Object.defineProperty(apiLoader.services['identitystore'], '2020-06-15', { - get: function get() { - var model = __nccwpck_require__(75797); - model.paginators = (__nccwpck_require__(44872)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.IdentityStore; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 57511: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 26916: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['imagebuilder'] = {}; -AWS.Imagebuilder = Service.defineService('imagebuilder', ['2019-12-02']); -Object.defineProperty(apiLoader.services['imagebuilder'], '2019-12-02', { - get: function get() { - var model = __nccwpck_require__(98139); - model.paginators = (__nccwpck_require__(60410)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Imagebuilder; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.EndpointURLScheme = void 0; +var EndpointURLScheme; +(function (EndpointURLScheme) { + EndpointURLScheme["HTTP"] = "http"; + EndpointURLScheme["HTTPS"] = "https"; +})(EndpointURLScheme = exports.EndpointURLScheme || (exports.EndpointURLScheme = {})); /***/ }), -/***/ 6769: 
-/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 56340: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['importexport'] = {}; -AWS.ImportExport = Service.defineService('importexport', ['2010-06-01']); -Object.defineProperty(apiLoader.services['importexport'], '2010-06-01', { - get: function get() { - var model = __nccwpck_require__(80317); - model.paginators = (__nccwpck_require__(58037)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.ImportExport; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 89439: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 8872: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['inspector'] = {}; -AWS.Inspector = Service.defineService('inspector', ['2015-08-18*', '2016-02-16']); -Object.defineProperty(apiLoader.services['inspector'], '2016-02-16', { - get: function get() { - var model = __nccwpck_require__(71649); - model.paginators = (__nccwpck_require__(69242)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Inspector; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 98650: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 58097: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['inspector2'] = {}; -AWS.Inspector2 = Service.defineService('inspector2', ['2020-06-08']); -Object.defineProperty(apiLoader.services['inspector2'], '2020-06-08', { - get: function get() { - var model = __nccwpck_require__(61291); - model.paginators = (__nccwpck_require__(17472)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Inspector2; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 84099: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 35058: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['internetmonitor'] = {}; -AWS.InternetMonitor = Service.defineService('internetmonitor', ['2021-06-03']); -Object.defineProperty(apiLoader.services['internetmonitor'], '2021-06-03', { - get: function get() { - var model = __nccwpck_require__(62158); - model.paginators = (__nccwpck_require__(64409)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(76543)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.InternetMonitor; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 98392: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 85952: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - 
-apiLoader.services['iot'] = {}; -AWS.Iot = Service.defineService('iot', ['2015-05-28']); -Object.defineProperty(apiLoader.services['iot'], '2015-05-28', { - get: function get() { - var model = __nccwpck_require__(40063); - model.paginators = (__nccwpck_require__(43999)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Iot; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(56340), exports); +tslib_1.__exportStar(__nccwpck_require__(8872), exports); +tslib_1.__exportStar(__nccwpck_require__(58097), exports); +tslib_1.__exportStar(__nccwpck_require__(75741), exports); +tslib_1.__exportStar(__nccwpck_require__(35058), exports); /***/ }), -/***/ 39474: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 75741: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['iot1clickdevicesservice'] = {}; -AWS.IoT1ClickDevicesService = Service.defineService('iot1clickdevicesservice', ['2018-05-14']); -Object.defineProperty(apiLoader.services['iot1clickdevicesservice'], '2018-05-14', { - get: function get() { - var model = __nccwpck_require__(26663); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.IoT1ClickDevicesService; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 4686: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 29813: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['iot1clickprojects'] = {}; -AWS.IoT1ClickProjects = Service.defineService('iot1clickprojects', ['2018-05-14']); -Object.defineProperty(apiLoader.services['iot1clickprojects'], '2018-05-14', { - get: function get() { - var model = __nccwpck_require__(17364); - model.paginators = (__nccwpck_require__(54033)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.IoT1ClickProjects; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 67409: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 71968: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['iotanalytics'] = {}; -AWS.IoTAnalytics = Service.defineService('iotanalytics', ['2017-11-27']); -Object.defineProperty(apiLoader.services['iotanalytics'], '2017-11-27', { - get: function get() { - var model = __nccwpck_require__(84609); - model.paginators = (__nccwpck_require__(45498)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.IoTAnalytics; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.FieldPosition = void 0; +var FieldPosition; +(function (FieldPosition) { + FieldPosition[FieldPosition["HEADER"] = 0] = "HEADER"; + FieldPosition[FieldPosition["TRAILER"] = 1] = "TRAILER"; +})(FieldPosition = exports.FieldPosition || (exports.FieldPosition = {})); /***/ }), -/***/ 6564: -/***/ ((module, __unused_webpack_exports, 
__nccwpck_require__) => { +/***/ 87433: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['iotdata'] = {}; -AWS.IotData = Service.defineService('iotdata', ['2015-05-28']); -__nccwpck_require__(27062); -Object.defineProperty(apiLoader.services['iotdata'], '2015-05-28', { - get: function get() { - var model = __nccwpck_require__(21717); - model.paginators = (__nccwpck_require__(31896)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.IotData; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 97569: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 42320: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['iotdeviceadvisor'] = {}; -AWS.IotDeviceAdvisor = Service.defineService('iotdeviceadvisor', ['2020-09-18']); -Object.defineProperty(apiLoader.services['iotdeviceadvisor'], '2020-09-18', { - get: function get() { - var model = __nccwpck_require__(71394); - model.paginators = (__nccwpck_require__(49057)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.IotDeviceAdvisor; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 88065: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 10369: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['iotevents'] = {}; -AWS.IoTEvents = Service.defineService('iotevents', ['2018-07-27']); -Object.defineProperty(apiLoader.services['iotevents'], '2018-07-27', { - get: function get() { - var model = __nccwpck_require__(4483); - model.paginators = (__nccwpck_require__(39844)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.IoTEvents; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(87433), exports); +tslib_1.__exportStar(__nccwpck_require__(42320), exports); /***/ }), -/***/ 56973: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 4338: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['ioteventsdata'] = {}; -AWS.IoTEventsData = Service.defineService('ioteventsdata', ['2018-10-23']); -Object.defineProperty(apiLoader.services['ioteventsdata'], '2018-10-23', { - get: function get() { - var model = __nccwpck_require__(94282); - model.paginators = (__nccwpck_require__(11632)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(89218), exports); +tslib_1.__exportStar(__nccwpck_require__(47641), exports); +tslib_1.__exportStar(__nccwpck_require__(39548), exports); 
+tslib_1.__exportStar(__nccwpck_require__(31001), exports); +tslib_1.__exportStar(__nccwpck_require__(39262), exports); +tslib_1.__exportStar(__nccwpck_require__(32820), exports); +tslib_1.__exportStar(__nccwpck_require__(2765), exports); +tslib_1.__exportStar(__nccwpck_require__(9076), exports); +tslib_1.__exportStar(__nccwpck_require__(70686), exports); +tslib_1.__exportStar(__nccwpck_require__(26916), exports); +tslib_1.__exportStar(__nccwpck_require__(85952), exports); +tslib_1.__exportStar(__nccwpck_require__(29813), exports); +tslib_1.__exportStar(__nccwpck_require__(71968), exports); +tslib_1.__exportStar(__nccwpck_require__(10369), exports); +tslib_1.__exportStar(__nccwpck_require__(28103), exports); +tslib_1.__exportStar(__nccwpck_require__(70007), exports); +tslib_1.__exportStar(__nccwpck_require__(71680), exports); +tslib_1.__exportStar(__nccwpck_require__(10219), exports); +tslib_1.__exportStar(__nccwpck_require__(91141), exports); +tslib_1.__exportStar(__nccwpck_require__(87846), exports); +tslib_1.__exportStar(__nccwpck_require__(81523), exports); +tslib_1.__exportStar(__nccwpck_require__(41596), exports); +tslib_1.__exportStar(__nccwpck_require__(62087), exports); +tslib_1.__exportStar(__nccwpck_require__(80572), exports); +tslib_1.__exportStar(__nccwpck_require__(53702), exports); +tslib_1.__exportStar(__nccwpck_require__(98855), exports); +tslib_1.__exportStar(__nccwpck_require__(64285), exports); +tslib_1.__exportStar(__nccwpck_require__(75970), exports); +tslib_1.__exportStar(__nccwpck_require__(99324), exports); +tslib_1.__exportStar(__nccwpck_require__(52356), exports); +tslib_1.__exportStar(__nccwpck_require__(2785), exports); +tslib_1.__exportStar(__nccwpck_require__(26030), exports); +tslib_1.__exportStar(__nccwpck_require__(73081), exports); + + +/***/ }), + +/***/ 28103: +/***/ ((__unused_webpack_module, exports) => { -module.exports = AWS.IoTEventsData; +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 42513: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 70007: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['iotfleethub'] = {}; -AWS.IoTFleetHub = Service.defineService('iotfleethub', ['2020-11-03']); -Object.defineProperty(apiLoader.services['iotfleethub'], '2020-11-03', { - get: function get() { - var model = __nccwpck_require__(56534); - model.paginators = (__nccwpck_require__(76120)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.IoTFleetHub; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 94329: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 71680: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['iotfleetwise'] = {}; -AWS.IoTFleetWise = Service.defineService('iotfleetwise', ['2021-06-17']); -Object.defineProperty(apiLoader.services['iotfleetwise'], '2021-06-17', { - get: function get() { - var model = __nccwpck_require__(68937); - model.paginators = (__nccwpck_require__(85715)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(23391)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; 
-module.exports = AWS.IoTFleetWise; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 42332: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 10219: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['iotjobsdataplane'] = {}; -AWS.IoTJobsDataPlane = Service.defineService('iotjobsdataplane', ['2017-09-29']); -Object.defineProperty(apiLoader.services['iotjobsdataplane'], '2017-09-29', { - get: function get() { - var model = __nccwpck_require__(12147); - model.paginators = (__nccwpck_require__(58593)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.IoTJobsDataPlane; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 22163: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 91141: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['iotroborunner'] = {}; -AWS.IoTRoboRunner = Service.defineService('iotroborunner', ['2018-05-10']); -Object.defineProperty(apiLoader.services['iotroborunner'], '2018-05-10', { - get: function get() { - var model = __nccwpck_require__(11483); - model.paginators = (__nccwpck_require__(82393)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.IoTRoboRunner; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 98562: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 87846: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['iotsecuretunneling'] = {}; -AWS.IoTSecureTunneling = Service.defineService('iotsecuretunneling', ['2018-10-05']); -Object.defineProperty(apiLoader.services['iotsecuretunneling'], '2018-10-05', { - get: function get() { - var model = __nccwpck_require__(99946); - model.paginators = (__nccwpck_require__(97884)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.IoTSecureTunneling; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 89690: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 81523: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['iotsitewise'] = {}; -AWS.IoTSiteWise = Service.defineService('iotsitewise', ['2019-12-02']); -Object.defineProperty(apiLoader.services['iotsitewise'], '2019-12-02', { - get: function get() { - var model = __nccwpck_require__(44429); - model.paginators = (__nccwpck_require__(27558)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(80458)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.IoTSiteWise; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 58905: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 41596: +/***/ ((__unused_webpack_module, 
exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['iotthingsgraph'] = {}; -AWS.IoTThingsGraph = Service.defineService('iotthingsgraph', ['2018-09-06']); -Object.defineProperty(apiLoader.services['iotthingsgraph'], '2018-09-06', { - get: function get() { - var model = __nccwpck_require__(84893); - model.paginators = (__nccwpck_require__(99418)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.IoTThingsGraph; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 65010: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 62087: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['iottwinmaker'] = {}; -AWS.IoTTwinMaker = Service.defineService('iottwinmaker', ['2021-11-29']); -Object.defineProperty(apiLoader.services['iottwinmaker'], '2021-11-29', { - get: function get() { - var model = __nccwpck_require__(30382); - model.paginators = (__nccwpck_require__(93389)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(41496)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.IoTTwinMaker; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 8226: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 80572: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['iotwireless'] = {}; -AWS.IoTWireless = Service.defineService('iotwireless', ['2020-11-22']); -Object.defineProperty(apiLoader.services['iotwireless'], '2020-11-22', { - get: function get() { - var model = __nccwpck_require__(78052); - model.paginators = (__nccwpck_require__(13156)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.IoTWireless; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 67701: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 53702: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['ivs'] = {}; -AWS.IVS = Service.defineService('ivs', ['2020-07-14']); -Object.defineProperty(apiLoader.services['ivs'], '2020-07-14', { - get: function get() { - var model = __nccwpck_require__(34175); - model.paginators = (__nccwpck_require__(45289)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.IVS; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 17077: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 98855: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['ivschat'] = {}; -AWS.Ivschat = Service.defineService('ivschat', ['2020-07-14']); -Object.defineProperty(apiLoader.services['ivschat'], '2020-07-14', { - get: function get() 
{ - var model = __nccwpck_require__(77512); - model.paginators = (__nccwpck_require__(85556)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Ivschat; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 51946: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 64285: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['ivsrealtime'] = {}; -AWS.IVSRealTime = Service.defineService('ivsrealtime', ['2020-07-14']); -Object.defineProperty(apiLoader.services['ivsrealtime'], '2020-07-14', { - get: function get() { - var model = __nccwpck_require__(23084); - model.paginators = (__nccwpck_require__(64507)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.IVSRealTime; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 56775: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 75970: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['kafka'] = {}; -AWS.Kafka = Service.defineService('kafka', ['2018-11-14']); -Object.defineProperty(apiLoader.services['kafka'], '2018-11-14', { - get: function get() { - var model = __nccwpck_require__(38473); - model.paginators = (__nccwpck_require__(79729)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Kafka; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.RequestHandlerProtocol = void 0; +var RequestHandlerProtocol; +(function (RequestHandlerProtocol) { + RequestHandlerProtocol["HTTP_0_9"] = "http/0.9"; + RequestHandlerProtocol["HTTP_1_0"] = "http/1.0"; + RequestHandlerProtocol["TDS_8_0"] = "tds/8.0"; +})(RequestHandlerProtocol = exports.RequestHandlerProtocol || (exports.RequestHandlerProtocol = {})); /***/ }), -/***/ 61879: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 99324: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['kafkaconnect'] = {}; -AWS.KafkaConnect = Service.defineService('kafkaconnect', ['2021-09-14']); -Object.defineProperty(apiLoader.services['kafkaconnect'], '2021-09-14', { - get: function get() { - var model = __nccwpck_require__(80867); - model.paginators = (__nccwpck_require__(32924)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.KafkaConnect; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 66122: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 52356: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['kendra'] = {}; -AWS.Kendra = Service.defineService('kendra', ['2019-02-03']); -Object.defineProperty(apiLoader.services['kendra'], '2019-02-03', { - get: function get() { - var model = __nccwpck_require__(80100); - model.paginators = 
(__nccwpck_require__(64519)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Kendra; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 46255: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 2785: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['kendraranking'] = {}; -AWS.KendraRanking = Service.defineService('kendraranking', ['2022-10-19']); -Object.defineProperty(apiLoader.services['kendraranking'], '2022-10-19', { - get: function get() { - var model = __nccwpck_require__(66044); - model.paginators = (__nccwpck_require__(38563)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.KendraRanking; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 24789: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 26030: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['keyspaces'] = {}; -AWS.Keyspaces = Service.defineService('keyspaces', ['2022-02-10']); -Object.defineProperty(apiLoader.services['keyspaces'], '2022-02-10', { - get: function get() { - var model = __nccwpck_require__(59857); - model.paginators = (__nccwpck_require__(19252)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(53164)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Keyspaces; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 49876: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 73081: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['kinesis'] = {}; -AWS.Kinesis = Service.defineService('kinesis', ['2013-12-02']); -Object.defineProperty(apiLoader.services['kinesis'], '2013-12-02', { - get: function get() { - var model = __nccwpck_require__(648); - model.paginators = (__nccwpck_require__(10424)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(54059)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Kinesis; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 90042: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 26250: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['kinesisanalytics'] = {}; -AWS.KinesisAnalytics = Service.defineService('kinesisanalytics', ['2015-08-14']); -Object.defineProperty(apiLoader.services['kinesisanalytics'], '2015-08-14', { - get: function get() { - var model = __nccwpck_require__(72653); - model.paginators = (__nccwpck_require__(73535)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.KinesisAnalytics; +Object.defineProperty(exports, "__esModule", ({ value: true })); 
+exports.escapeUriPath = void 0; +const escape_uri_1 = __nccwpck_require__(93186); +const escapeUriPath = (uri) => uri.split("/").map(escape_uri_1.escapeUri).join("/"); +exports.escapeUriPath = escapeUriPath; /***/ }), -/***/ 74631: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 93186: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['kinesisanalyticsv2'] = {}; -AWS.KinesisAnalyticsV2 = Service.defineService('kinesisanalyticsv2', ['2018-05-23']); -Object.defineProperty(apiLoader.services['kinesisanalyticsv2'], '2018-05-23', { - get: function get() { - var model = __nccwpck_require__(56485); - model.paginators = (__nccwpck_require__(52495)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.KinesisAnalyticsV2; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.escapeUri = void 0; +const escapeUri = (uri) => encodeURIComponent(uri).replace(/[!'()*]/g, hexEncode); +exports.escapeUri = escapeUri; +const hexEncode = (c) => `%${c.charCodeAt(0).toString(16).toUpperCase()}`; /***/ }), -/***/ 89927: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 9014: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['kinesisvideo'] = {}; -AWS.KinesisVideo = Service.defineService('kinesisvideo', ['2017-09-30']); -Object.defineProperty(apiLoader.services['kinesisvideo'], '2017-09-30', { - get: function get() { - var model = __nccwpck_require__(96305); - model.paginators = (__nccwpck_require__(50061)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.KinesisVideo; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(93186), exports); +tslib_1.__exportStar(__nccwpck_require__(26250), exports); /***/ }), -/***/ 5580: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 52664: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['kinesisvideoarchivedmedia'] = {}; -AWS.KinesisVideoArchivedMedia = Service.defineService('kinesisvideoarchivedmedia', ['2017-09-30']); -Object.defineProperty(apiLoader.services['kinesisvideoarchivedmedia'], '2017-09-30', { - get: function get() { - var model = __nccwpck_require__(78868); - model.paginators = (__nccwpck_require__(27352)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.KinesisVideoArchivedMedia; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.UnsupportedGrantTypeException = exports.UnauthorizedClientException = exports.SlowDownException = exports.SSOOIDCClient = exports.InvalidScopeException = exports.InvalidRequestException = exports.InvalidClientException = exports.InternalServerException = exports.ExpiredTokenException = exports.CreateTokenCommand = exports.AuthorizationPendingException = exports.AccessDeniedException = void 0; +const 
middleware_host_header_1 = __nccwpck_require__(22545); +const middleware_logger_1 = __nccwpck_require__(20014); +const middleware_recursion_detection_1 = __nccwpck_require__(85525); +const middleware_user_agent_1 = __nccwpck_require__(64688); +const config_resolver_1 = __nccwpck_require__(53098); +const middleware_content_length_1 = __nccwpck_require__(82800); +const middleware_endpoint_1 = __nccwpck_require__(82918); +const middleware_retry_1 = __nccwpck_require__(96039); +const smithy_client_1 = __nccwpck_require__(63570); +var resolveClientEndpointParameters = (options) => { + var _a, _b; + return { + ...options, + useDualstackEndpoint: (_a = options.useDualstackEndpoint) !== null && _a !== void 0 ? _a : false, + useFipsEndpoint: (_b = options.useFipsEndpoint) !== null && _b !== void 0 ? _b : false, + defaultSigningName: "awsssooidc" + }; +}; +var package_default = { version: "3.387.0" }; +const util_user_agent_node_1 = __nccwpck_require__(98095); +const config_resolver_2 = __nccwpck_require__(53098); +const hash_node_1 = __nccwpck_require__(3081); +const middleware_retry_2 = __nccwpck_require__(96039); +const node_config_provider_1 = __nccwpck_require__(33461); +const node_http_handler_1 = __nccwpck_require__(20258); +const util_body_length_node_1 = __nccwpck_require__(68075); +const util_retry_1 = __nccwpck_require__(84902); +const smithy_client_2 = __nccwpck_require__(63570); +const url_parser_1 = __nccwpck_require__(14681); +const util_base64_1 = __nccwpck_require__(75600); +const util_utf8_1 = __nccwpck_require__(41895); +const util_endpoints_1 = __nccwpck_require__(13350); +var p = "required"; +var q = "fn"; +var r = "argv"; +var s = "ref"; +var a = "PartitionResult"; +var b = "tree"; +var c = "error"; +var d = "endpoint"; +var e = { [p]: false, "type": "String" }; +var f = { [p]: true, "default": false, "type": "Boolean" }; +var g = { [s]: "Endpoint" }; +var h = { [q]: "booleanEquals", [r]: [{ [s]: "UseFIPS" }, true] }; +var i = { [q]: "booleanEquals", [r]: [{ [s]: "UseDualStack" }, true] }; +var j = {}; +var k = { [q]: "booleanEquals", [r]: [true, { [q]: "getAttr", [r]: [{ [s]: a }, "supportsFIPS"] }] }; +var l = { [q]: "booleanEquals", [r]: [true, { [q]: "getAttr", [r]: [{ [s]: a }, "supportsDualStack"] }] }; +var m = [g]; +var n = [h]; +var o = [i]; +var _data = { version: "1.0", parameters: { Region: e, UseDualStack: f, UseFIPS: f, Endpoint: e }, rules: [{ conditions: [{ [q]: "aws.partition", [r]: [{ [s]: "Region" }], assign: a }], type: b, rules: [{ conditions: [{ [q]: "isSet", [r]: m }, { [q]: "parseURL", [r]: m, assign: "url" }], type: b, rules: [{ conditions: n, error: "Invalid Configuration: FIPS and custom endpoint are not supported", type: c }, { type: b, rules: [{ conditions: o, error: "Invalid Configuration: Dualstack and custom endpoint are not supported", type: c }, { endpoint: { url: g, properties: j, headers: j }, type: d }] }] }, { conditions: [h, i], type: b, rules: [{ conditions: [k, l], type: b, rules: [{ endpoint: { url: "https://oidc-fips.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: j, headers: j }, type: d }] }, { error: "FIPS and DualStack are enabled, but this partition does not support one or both", type: c }] }, { conditions: n, type: b, rules: [{ conditions: [k], type: b, rules: [{ type: b, rules: [{ endpoint: { url: "https://oidc-fips.{Region}.{PartitionResult#dnsSuffix}", properties: j, headers: j }, type: d }] }] }, { error: "FIPS is enabled but this partition does not support FIPS", type: c }] }, { conditions: o, type: b, rules: [{ 
conditions: [l], type: b, rules: [{ endpoint: { url: "https://oidc.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: j, headers: j }, type: d }] }, { error: "DualStack is enabled but this partition does not support DualStack", type: c }] }, { endpoint: { url: "https://oidc.{Region}.{PartitionResult#dnsSuffix}", properties: j, headers: j }, type: d }] }] }; +var ruleSet = _data; +var defaultEndpointResolver = (endpointParams, context = {}) => { + return (0, util_endpoints_1.resolveEndpoint)(ruleSet, { + endpointParams, + logger: context.logger + }); +}; +var getRuntimeConfig = (config) => { + var _a, _b, _c, _d, _e, _f, _g, _h, _j; + return ({ + apiVersion: "2019-06-10", + base64Decoder: (_a = config === null || config === void 0 ? void 0 : config.base64Decoder) !== null && _a !== void 0 ? _a : util_base64_1.fromBase64, + base64Encoder: (_b = config === null || config === void 0 ? void 0 : config.base64Encoder) !== null && _b !== void 0 ? _b : util_base64_1.toBase64, + disableHostPrefix: (_c = config === null || config === void 0 ? void 0 : config.disableHostPrefix) !== null && _c !== void 0 ? _c : false, + endpointProvider: (_d = config === null || config === void 0 ? void 0 : config.endpointProvider) !== null && _d !== void 0 ? _d : defaultEndpointResolver, + logger: (_e = config === null || config === void 0 ? void 0 : config.logger) !== null && _e !== void 0 ? _e : new smithy_client_2.NoOpLogger(), + serviceId: (_f = config === null || config === void 0 ? void 0 : config.serviceId) !== null && _f !== void 0 ? _f : "SSO OIDC", + urlParser: (_g = config === null || config === void 0 ? void 0 : config.urlParser) !== null && _g !== void 0 ? _g : url_parser_1.parseUrl, + utf8Decoder: (_h = config === null || config === void 0 ? void 0 : config.utf8Decoder) !== null && _h !== void 0 ? _h : util_utf8_1.fromUtf8, + utf8Encoder: (_j = config === null || config === void 0 ? void 0 : config.utf8Encoder) !== null && _j !== void 0 ? _j : util_utf8_1.toUtf8 + }); +}; +const smithy_client_3 = __nccwpck_require__(63570); +const util_defaults_mode_node_1 = __nccwpck_require__(72429); +const smithy_client_4 = __nccwpck_require__(63570); +var getRuntimeConfig2 = (config) => { + var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k; + (0, smithy_client_4.emitWarningIfUnsupportedVersion)(process.version); + const defaultsMode = (0, util_defaults_mode_node_1.resolveDefaultsModeConfig)(config); + const defaultConfigProvider = () => defaultsMode().then(smithy_client_3.loadConfigsForDefaultMode); + const clientSharedValues = getRuntimeConfig(config); + return { + ...clientSharedValues, + ...config, + runtime: "node", + defaultsMode, + bodyLengthChecker: (_a = config === null || config === void 0 ? void 0 : config.bodyLengthChecker) !== null && _a !== void 0 ? _a : util_body_length_node_1.calculateBodyLength, + defaultUserAgentProvider: (_b = config === null || config === void 0 ? void 0 : config.defaultUserAgentProvider) !== null && _b !== void 0 ? _b : (0, util_user_agent_node_1.defaultUserAgent)({ serviceId: clientSharedValues.serviceId, clientVersion: package_default.version }), + maxAttempts: (_c = config === null || config === void 0 ? void 0 : config.maxAttempts) !== null && _c !== void 0 ? _c : (0, node_config_provider_1.loadConfig)(middleware_retry_2.NODE_MAX_ATTEMPT_CONFIG_OPTIONS), + region: (_d = config === null || config === void 0 ? void 0 : config.region) !== null && _d !== void 0 ? 
_d : (0, node_config_provider_1.loadConfig)(config_resolver_2.NODE_REGION_CONFIG_OPTIONS, config_resolver_2.NODE_REGION_CONFIG_FILE_OPTIONS), + requestHandler: (_e = config === null || config === void 0 ? void 0 : config.requestHandler) !== null && _e !== void 0 ? _e : new node_http_handler_1.NodeHttpHandler(defaultConfigProvider), + retryMode: (_f = config === null || config === void 0 ? void 0 : config.retryMode) !== null && _f !== void 0 ? _f : (0, node_config_provider_1.loadConfig)({ + ...middleware_retry_2.NODE_RETRY_MODE_CONFIG_OPTIONS, + default: async () => (await defaultConfigProvider()).retryMode || util_retry_1.DEFAULT_RETRY_MODE + }), + sha256: (_g = config === null || config === void 0 ? void 0 : config.sha256) !== null && _g !== void 0 ? _g : hash_node_1.Hash.bind(null, "sha256"), + streamCollector: (_h = config === null || config === void 0 ? void 0 : config.streamCollector) !== null && _h !== void 0 ? _h : node_http_handler_1.streamCollector, + useDualstackEndpoint: (_j = config === null || config === void 0 ? void 0 : config.useDualstackEndpoint) !== null && _j !== void 0 ? _j : (0, node_config_provider_1.loadConfig)(config_resolver_2.NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS), + useFipsEndpoint: (_k = config === null || config === void 0 ? void 0 : config.useFipsEndpoint) !== null && _k !== void 0 ? _k : (0, node_config_provider_1.loadConfig)(config_resolver_2.NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS) + }; +}; +var SSOOIDCClient = class extends smithy_client_1.Client { + constructor(...[configuration]) { + const _config_0 = getRuntimeConfig2(configuration || {}); + const _config_1 = resolveClientEndpointParameters(_config_0); + const _config_2 = (0, config_resolver_1.resolveRegionConfig)(_config_1); + const _config_3 = (0, middleware_endpoint_1.resolveEndpointConfig)(_config_2); + const _config_4 = (0, middleware_retry_1.resolveRetryConfig)(_config_3); + const _config_5 = (0, middleware_host_header_1.resolveHostHeaderConfig)(_config_4); + const _config_6 = (0, middleware_user_agent_1.resolveUserAgentConfig)(_config_5); + super(_config_6); + this.config = _config_6; + this.middlewareStack.use((0, middleware_retry_1.getRetryPlugin)(this.config)); + this.middlewareStack.use((0, middleware_content_length_1.getContentLengthPlugin)(this.config)); + this.middlewareStack.use((0, middleware_host_header_1.getHostHeaderPlugin)(this.config)); + this.middlewareStack.use((0, middleware_logger_1.getLoggerPlugin)(this.config)); + this.middlewareStack.use((0, middleware_recursion_detection_1.getRecursionDetectionPlugin)(this.config)); + this.middlewareStack.use((0, middleware_user_agent_1.getUserAgentPlugin)(this.config)); + } + destroy() { + super.destroy(); + } +}; +exports.SSOOIDCClient = SSOOIDCClient; +const smithy_client_5 = __nccwpck_require__(63570); +const middleware_endpoint_2 = __nccwpck_require__(82918); +const middleware_serde_1 = __nccwpck_require__(81238); +const smithy_client_6 = __nccwpck_require__(63570); +const protocol_http_1 = __nccwpck_require__(64418); +const smithy_client_7 = __nccwpck_require__(63570); +const smithy_client_8 = __nccwpck_require__(63570); +var SSOOIDCServiceException = class _SSOOIDCServiceException extends smithy_client_8.ServiceException { + constructor(options) { + super(options); + Object.setPrototypeOf(this, _SSOOIDCServiceException.prototype); + } +}; +var AccessDeniedException = class _AccessDeniedException extends SSOOIDCServiceException { + constructor(opts) { + super({ + name: "AccessDeniedException", + $fault: "client", + ...opts + }); + 
this.name = "AccessDeniedException"; + this.$fault = "client"; + Object.setPrototypeOf(this, _AccessDeniedException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +exports.AccessDeniedException = AccessDeniedException; +var AuthorizationPendingException = class _AuthorizationPendingException extends SSOOIDCServiceException { + constructor(opts) { + super({ + name: "AuthorizationPendingException", + $fault: "client", + ...opts + }); + this.name = "AuthorizationPendingException"; + this.$fault = "client"; + Object.setPrototypeOf(this, _AuthorizationPendingException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +exports.AuthorizationPendingException = AuthorizationPendingException; +var ExpiredTokenException = class _ExpiredTokenException extends SSOOIDCServiceException { + constructor(opts) { + super({ + name: "ExpiredTokenException", + $fault: "client", + ...opts + }); + this.name = "ExpiredTokenException"; + this.$fault = "client"; + Object.setPrototypeOf(this, _ExpiredTokenException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +exports.ExpiredTokenException = ExpiredTokenException; +var InternalServerException = class _InternalServerException extends SSOOIDCServiceException { + constructor(opts) { + super({ + name: "InternalServerException", + $fault: "server", + ...opts + }); + this.name = "InternalServerException"; + this.$fault = "server"; + Object.setPrototypeOf(this, _InternalServerException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +exports.InternalServerException = InternalServerException; +var InvalidClientException = class _InvalidClientException extends SSOOIDCServiceException { + constructor(opts) { + super({ + name: "InvalidClientException", + $fault: "client", + ...opts + }); + this.name = "InvalidClientException"; + this.$fault = "client"; + Object.setPrototypeOf(this, _InvalidClientException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +exports.InvalidClientException = InvalidClientException; +var InvalidGrantException = class _InvalidGrantException extends SSOOIDCServiceException { + constructor(opts) { + super({ + name: "InvalidGrantException", + $fault: "client", + ...opts + }); + this.name = "InvalidGrantException"; + this.$fault = "client"; + Object.setPrototypeOf(this, _InvalidGrantException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +var InvalidRequestException = class _InvalidRequestException extends SSOOIDCServiceException { + constructor(opts) { + super({ + name: "InvalidRequestException", + $fault: "client", + ...opts + }); + this.name = "InvalidRequestException"; + this.$fault = "client"; + Object.setPrototypeOf(this, _InvalidRequestException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +exports.InvalidRequestException = InvalidRequestException; +var InvalidScopeException = class _InvalidScopeException extends SSOOIDCServiceException { + constructor(opts) { + super({ + name: "InvalidScopeException", + $fault: "client", + ...opts + }); + this.name = "InvalidScopeException"; + this.$fault = "client"; + Object.setPrototypeOf(this, _InvalidScopeException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +exports.InvalidScopeException = 
InvalidScopeException; +var SlowDownException = class _SlowDownException extends SSOOIDCServiceException { + constructor(opts) { + super({ + name: "SlowDownException", + $fault: "client", + ...opts + }); + this.name = "SlowDownException"; + this.$fault = "client"; + Object.setPrototypeOf(this, _SlowDownException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +exports.SlowDownException = SlowDownException; +var UnauthorizedClientException = class _UnauthorizedClientException extends SSOOIDCServiceException { + constructor(opts) { + super({ + name: "UnauthorizedClientException", + $fault: "client", + ...opts + }); + this.name = "UnauthorizedClientException"; + this.$fault = "client"; + Object.setPrototypeOf(this, _UnauthorizedClientException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +exports.UnauthorizedClientException = UnauthorizedClientException; +var UnsupportedGrantTypeException = class _UnsupportedGrantTypeException extends SSOOIDCServiceException { + constructor(opts) { + super({ + name: "UnsupportedGrantTypeException", + $fault: "client", + ...opts + }); + this.name = "UnsupportedGrantTypeException"; + this.$fault = "client"; + Object.setPrototypeOf(this, _UnsupportedGrantTypeException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +exports.UnsupportedGrantTypeException = UnsupportedGrantTypeException; +var InvalidClientMetadataException = class _InvalidClientMetadataException extends SSOOIDCServiceException { + constructor(opts) { + super({ + name: "InvalidClientMetadataException", + $fault: "client", + ...opts + }); + this.name = "InvalidClientMetadataException"; + this.$fault = "client"; + Object.setPrototypeOf(this, _InvalidClientMetadataException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +var se_CreateTokenCommand = async (input, context) => { + const { hostname, protocol = "https", port, path: basePath } = await context.endpoint(); + const headers = { + "content-type": "application/json" + }; + const resolvedPath = `${(basePath === null || basePath === void 0 ? void 0 : basePath.endsWith("/")) ? basePath.slice(0, -1) : basePath || ""}/token`; + let body; + body = JSON.stringify((0, smithy_client_7.take)(input, { + clientId: [], + clientSecret: [], + code: [], + deviceCode: [], + grantType: [], + redirectUri: [], + refreshToken: [], + scope: (_) => (0, smithy_client_7._json)(_) + })); + return new protocol_http_1.HttpRequest({ + protocol, + hostname, + port, + method: "POST", + headers, + path: resolvedPath, + body + }); +}; +var se_RegisterClientCommand = async (input, context) => { + const { hostname, protocol = "https", port, path: basePath } = await context.endpoint(); + const headers = { + "content-type": "application/json" + }; + const resolvedPath = `${(basePath === null || basePath === void 0 ? void 0 : basePath.endsWith("/")) ? 
basePath.slice(0, -1) : basePath || ""}/client/register`; + let body; + body = JSON.stringify((0, smithy_client_7.take)(input, { + clientName: [], + clientType: [], + scopes: (_) => (0, smithy_client_7._json)(_) + })); + return new protocol_http_1.HttpRequest({ + protocol, + hostname, + port, + method: "POST", + headers, + path: resolvedPath, + body + }); +}; +var se_StartDeviceAuthorizationCommand = async (input, context) => { + const { hostname, protocol = "https", port, path: basePath } = await context.endpoint(); + const headers = { + "content-type": "application/json" + }; + const resolvedPath = `${(basePath === null || basePath === void 0 ? void 0 : basePath.endsWith("/")) ? basePath.slice(0, -1) : basePath || ""}/device_authorization`; + let body; + body = JSON.stringify((0, smithy_client_7.take)(input, { + clientId: [], + clientSecret: [], + startUrl: [] + })); + return new protocol_http_1.HttpRequest({ + protocol, + hostname, + port, + method: "POST", + headers, + path: resolvedPath, + body + }); +}; +var de_CreateTokenCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CreateTokenCommandError(output, context); + } + const contents = (0, smithy_client_7.map)({ + $metadata: deserializeMetadata(output) + }); + const data = (0, smithy_client_7.expectNonNull)((0, smithy_client_7.expectObject)(await parseBody(output.body, context)), "body"); + const doc = (0, smithy_client_7.take)(data, { + accessToken: smithy_client_7.expectString, + expiresIn: smithy_client_7.expectInt32, + idToken: smithy_client_7.expectString, + refreshToken: smithy_client_7.expectString, + tokenType: smithy_client_7.expectString + }); + Object.assign(contents, doc); + return contents; +}; +var de_CreateTokenCommandError = async (output, context) => { + const parsedOutput = { + ...output, + body: await parseErrorBody(output.body, context) + }; + const errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "AccessDeniedException": + case "com.amazonaws.ssooidc#AccessDeniedException": + throw await de_AccessDeniedExceptionRes(parsedOutput, context); + case "AuthorizationPendingException": + case "com.amazonaws.ssooidc#AuthorizationPendingException": + throw await de_AuthorizationPendingExceptionRes(parsedOutput, context); + case "ExpiredTokenException": + case "com.amazonaws.ssooidc#ExpiredTokenException": + throw await de_ExpiredTokenExceptionRes(parsedOutput, context); + case "InternalServerException": + case "com.amazonaws.ssooidc#InternalServerException": + throw await de_InternalServerExceptionRes(parsedOutput, context); + case "InvalidClientException": + case "com.amazonaws.ssooidc#InvalidClientException": + throw await de_InvalidClientExceptionRes(parsedOutput, context); + case "InvalidGrantException": + case "com.amazonaws.ssooidc#InvalidGrantException": + throw await de_InvalidGrantExceptionRes(parsedOutput, context); + case "InvalidRequestException": + case "com.amazonaws.ssooidc#InvalidRequestException": + throw await de_InvalidRequestExceptionRes(parsedOutput, context); + case "InvalidScopeException": + case "com.amazonaws.ssooidc#InvalidScopeException": + throw await de_InvalidScopeExceptionRes(parsedOutput, context); + case "SlowDownException": + case "com.amazonaws.ssooidc#SlowDownException": + throw await de_SlowDownExceptionRes(parsedOutput, context); + case "UnauthorizedClientException": + case "com.amazonaws.ssooidc#UnauthorizedClientException": + throw await 
de_UnauthorizedClientExceptionRes(parsedOutput, context); + case "UnsupportedGrantTypeException": + case "com.amazonaws.ssooidc#UnsupportedGrantTypeException": + throw await de_UnsupportedGrantTypeExceptionRes(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody, + errorCode + }); + } +}; +var de_RegisterClientCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_RegisterClientCommandError(output, context); + } + const contents = (0, smithy_client_7.map)({ + $metadata: deserializeMetadata(output) + }); + const data = (0, smithy_client_7.expectNonNull)((0, smithy_client_7.expectObject)(await parseBody(output.body, context)), "body"); + const doc = (0, smithy_client_7.take)(data, { + authorizationEndpoint: smithy_client_7.expectString, + clientId: smithy_client_7.expectString, + clientIdIssuedAt: smithy_client_7.expectLong, + clientSecret: smithy_client_7.expectString, + clientSecretExpiresAt: smithy_client_7.expectLong, + tokenEndpoint: smithy_client_7.expectString + }); + Object.assign(contents, doc); + return contents; +}; +var de_RegisterClientCommandError = async (output, context) => { + const parsedOutput = { + ...output, + body: await parseErrorBody(output.body, context) + }; + const errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "InternalServerException": + case "com.amazonaws.ssooidc#InternalServerException": + throw await de_InternalServerExceptionRes(parsedOutput, context); + case "InvalidClientMetadataException": + case "com.amazonaws.ssooidc#InvalidClientMetadataException": + throw await de_InvalidClientMetadataExceptionRes(parsedOutput, context); + case "InvalidRequestException": + case "com.amazonaws.ssooidc#InvalidRequestException": + throw await de_InvalidRequestExceptionRes(parsedOutput, context); + case "InvalidScopeException": + case "com.amazonaws.ssooidc#InvalidScopeException": + throw await de_InvalidScopeExceptionRes(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody, + errorCode + }); + } +}; +var de_StartDeviceAuthorizationCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_StartDeviceAuthorizationCommandError(output, context); + } + const contents = (0, smithy_client_7.map)({ + $metadata: deserializeMetadata(output) + }); + const data = (0, smithy_client_7.expectNonNull)((0, smithy_client_7.expectObject)(await parseBody(output.body, context)), "body"); + const doc = (0, smithy_client_7.take)(data, { + deviceCode: smithy_client_7.expectString, + expiresIn: smithy_client_7.expectInt32, + interval: smithy_client_7.expectInt32, + userCode: smithy_client_7.expectString, + verificationUri: smithy_client_7.expectString, + verificationUriComplete: smithy_client_7.expectString + }); + Object.assign(contents, doc); + return contents; +}; +var de_StartDeviceAuthorizationCommandError = async (output, context) => { + const parsedOutput = { + ...output, + body: await parseErrorBody(output.body, context) + }; + const errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "InternalServerException": + case "com.amazonaws.ssooidc#InternalServerException": + throw await de_InternalServerExceptionRes(parsedOutput, context); + case "InvalidClientException": + case "com.amazonaws.ssooidc#InvalidClientException": + throw await 
de_InvalidClientExceptionRes(parsedOutput, context); + case "InvalidRequestException": + case "com.amazonaws.ssooidc#InvalidRequestException": + throw await de_InvalidRequestExceptionRes(parsedOutput, context); + case "SlowDownException": + case "com.amazonaws.ssooidc#SlowDownException": + throw await de_SlowDownExceptionRes(parsedOutput, context); + case "UnauthorizedClientException": + case "com.amazonaws.ssooidc#UnauthorizedClientException": + throw await de_UnauthorizedClientExceptionRes(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody, + errorCode + }); + } +}; +var throwDefaultError = (0, smithy_client_7.withBaseException)(SSOOIDCServiceException); +var de_AccessDeniedExceptionRes = async (parsedOutput, context) => { + const contents = (0, smithy_client_7.map)({}); + const data = parsedOutput.body; + const doc = (0, smithy_client_7.take)(data, { + error: smithy_client_7.expectString, + error_description: smithy_client_7.expectString + }); + Object.assign(contents, doc); + const exception = new AccessDeniedException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, smithy_client_7.decorateServiceException)(exception, parsedOutput.body); +}; +var de_AuthorizationPendingExceptionRes = async (parsedOutput, context) => { + const contents = (0, smithy_client_7.map)({}); + const data = parsedOutput.body; + const doc = (0, smithy_client_7.take)(data, { + error: smithy_client_7.expectString, + error_description: smithy_client_7.expectString + }); + Object.assign(contents, doc); + const exception = new AuthorizationPendingException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, smithy_client_7.decorateServiceException)(exception, parsedOutput.body); +}; +var de_ExpiredTokenExceptionRes = async (parsedOutput, context) => { + const contents = (0, smithy_client_7.map)({}); + const data = parsedOutput.body; + const doc = (0, smithy_client_7.take)(data, { + error: smithy_client_7.expectString, + error_description: smithy_client_7.expectString + }); + Object.assign(contents, doc); + const exception = new ExpiredTokenException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, smithy_client_7.decorateServiceException)(exception, parsedOutput.body); +}; +var de_InternalServerExceptionRes = async (parsedOutput, context) => { + const contents = (0, smithy_client_7.map)({}); + const data = parsedOutput.body; + const doc = (0, smithy_client_7.take)(data, { + error: smithy_client_7.expectString, + error_description: smithy_client_7.expectString + }); + Object.assign(contents, doc); + const exception = new InternalServerException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, smithy_client_7.decorateServiceException)(exception, parsedOutput.body); +}; +var de_InvalidClientExceptionRes = async (parsedOutput, context) => { + const contents = (0, smithy_client_7.map)({}); + const data = parsedOutput.body; + const doc = (0, smithy_client_7.take)(data, { + error: smithy_client_7.expectString, + error_description: smithy_client_7.expectString + }); + Object.assign(contents, doc); + const exception = new InvalidClientException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, smithy_client_7.decorateServiceException)(exception, parsedOutput.body); +}; +var de_InvalidClientMetadataExceptionRes = async (parsedOutput, context) => { + const contents = (0, 
smithy_client_7.map)({}); + const data = parsedOutput.body; + const doc = (0, smithy_client_7.take)(data, { + error: smithy_client_7.expectString, + error_description: smithy_client_7.expectString + }); + Object.assign(contents, doc); + const exception = new InvalidClientMetadataException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, smithy_client_7.decorateServiceException)(exception, parsedOutput.body); +}; +var de_InvalidGrantExceptionRes = async (parsedOutput, context) => { + const contents = (0, smithy_client_7.map)({}); + const data = parsedOutput.body; + const doc = (0, smithy_client_7.take)(data, { + error: smithy_client_7.expectString, + error_description: smithy_client_7.expectString + }); + Object.assign(contents, doc); + const exception = new InvalidGrantException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, smithy_client_7.decorateServiceException)(exception, parsedOutput.body); +}; +var de_InvalidRequestExceptionRes = async (parsedOutput, context) => { + const contents = (0, smithy_client_7.map)({}); + const data = parsedOutput.body; + const doc = (0, smithy_client_7.take)(data, { + error: smithy_client_7.expectString, + error_description: smithy_client_7.expectString + }); + Object.assign(contents, doc); + const exception = new InvalidRequestException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, smithy_client_7.decorateServiceException)(exception, parsedOutput.body); +}; +var de_InvalidScopeExceptionRes = async (parsedOutput, context) => { + const contents = (0, smithy_client_7.map)({}); + const data = parsedOutput.body; + const doc = (0, smithy_client_7.take)(data, { + error: smithy_client_7.expectString, + error_description: smithy_client_7.expectString + }); + Object.assign(contents, doc); + const exception = new InvalidScopeException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, smithy_client_7.decorateServiceException)(exception, parsedOutput.body); +}; +var de_SlowDownExceptionRes = async (parsedOutput, context) => { + const contents = (0, smithy_client_7.map)({}); + const data = parsedOutput.body; + const doc = (0, smithy_client_7.take)(data, { + error: smithy_client_7.expectString, + error_description: smithy_client_7.expectString + }); + Object.assign(contents, doc); + const exception = new SlowDownException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, smithy_client_7.decorateServiceException)(exception, parsedOutput.body); +}; +var de_UnauthorizedClientExceptionRes = async (parsedOutput, context) => { + const contents = (0, smithy_client_7.map)({}); + const data = parsedOutput.body; + const doc = (0, smithy_client_7.take)(data, { + error: smithy_client_7.expectString, + error_description: smithy_client_7.expectString + }); + Object.assign(contents, doc); + const exception = new UnauthorizedClientException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, smithy_client_7.decorateServiceException)(exception, parsedOutput.body); +}; +var de_UnsupportedGrantTypeExceptionRes = async (parsedOutput, context) => { + const contents = (0, smithy_client_7.map)({}); + const data = parsedOutput.body; + const doc = (0, smithy_client_7.take)(data, { + error: smithy_client_7.expectString, + error_description: smithy_client_7.expectString + }); + Object.assign(contents, doc); + const exception = new UnsupportedGrantTypeException({ + $metadata: 
deserializeMetadata(parsedOutput), + ...contents + }); + return (0, smithy_client_7.decorateServiceException)(exception, parsedOutput.body); +}; +var deserializeMetadata = (output) => { + var _a, _b; + return ({ + httpStatusCode: output.statusCode, + requestId: (_b = (_a = output.headers["x-amzn-requestid"]) !== null && _a !== void 0 ? _a : output.headers["x-amzn-request-id"]) !== null && _b !== void 0 ? _b : output.headers["x-amz-request-id"], + extendedRequestId: output.headers["x-amz-id-2"], + cfId: output.headers["x-amz-cf-id"] + }); +}; +var collectBodyString = (streamBody, context) => (0, smithy_client_7.collectBody)(streamBody, context).then((body) => context.utf8Encoder(body)); +var parseBody = (streamBody, context) => collectBodyString(streamBody, context).then((encoded) => { + if (encoded.length) { + return JSON.parse(encoded); + } + return {}; +}); +var parseErrorBody = async (errorBody, context) => { + var _a; + const value = await parseBody(errorBody, context); + value.message = (_a = value.message) !== null && _a !== void 0 ? _a : value.Message; + return value; +}; +var loadRestJsonErrorCode = (output, data) => { + const findKey = (object, key) => Object.keys(object).find((k2) => k2.toLowerCase() === key.toLowerCase()); + const sanitizeErrorCode = (rawValue) => { + let cleanValue = rawValue; + if (typeof cleanValue === "number") { + cleanValue = cleanValue.toString(); + } + if (cleanValue.indexOf(",") >= 0) { + cleanValue = cleanValue.split(",")[0]; + } + if (cleanValue.indexOf(":") >= 0) { + cleanValue = cleanValue.split(":")[0]; + } + if (cleanValue.indexOf("#") >= 0) { + cleanValue = cleanValue.split("#")[1]; + } + return cleanValue; + }; + const headerKey = findKey(output.headers, "x-amzn-errortype"); + if (headerKey !== void 0) { + return sanitizeErrorCode(output.headers[headerKey]); + } + if (data.code !== void 0) { + return sanitizeErrorCode(data.code); + } + if (data["__type"] !== void 0) { + return sanitizeErrorCode(data["__type"]); + } +}; +var CreateTokenCommand = class _CreateTokenCommand extends smithy_client_6.Command { + constructor(input) { + super(); + this.input = input; + } + static getEndpointParameterInstructions() { + return { + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" } + }; + } + resolveMiddleware(clientStack, configuration, options) { + this.middlewareStack.use((0, middleware_serde_1.getSerdePlugin)(configuration, this.serialize, this.deserialize)); + this.middlewareStack.use((0, middleware_endpoint_2.getEndpointPlugin)(configuration, _CreateTokenCommand.getEndpointParameterInstructions())); + const stack = clientStack.concat(this.middlewareStack); + const { logger } = configuration; + const clientName = "SSOOIDCClient"; + const commandName = "CreateTokenCommand"; + const handlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: (_) => _, + outputFilterSensitiveLog: (_) => _ + }; + const { requestHandler } = configuration; + return stack.resolve((request) => requestHandler.handle(request.request, options || {}), handlerExecutionContext); + } + serialize(input, context) { + return se_CreateTokenCommand(input, context); + } + deserialize(output, context) { + return de_CreateTokenCommand(output, context); + } +}; +exports.CreateTokenCommand = CreateTokenCommand; +const middleware_endpoint_3 = __nccwpck_require__(82918); 
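+// Note (descriptive comment, added for readability of this bundled code): the RegisterClientCommand and
+// StartDeviceAuthorizationCommand classes that follow mirror CreateTokenCommand above — getSerdePlugin wires
+// the serialize/deserialize pair, getEndpointPlugin applies the same built-in endpoint parameter instructions,
+// and resolveMiddleware hands the serialized HTTP request to the configured requestHandler.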
+const middleware_serde_2 = __nccwpck_require__(81238); +const smithy_client_9 = __nccwpck_require__(63570); +var RegisterClientCommand = class _RegisterClientCommand extends smithy_client_9.Command { + constructor(input) { + super(); + this.input = input; + } + static getEndpointParameterInstructions() { + return { + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" } + }; + } + resolveMiddleware(clientStack, configuration, options) { + this.middlewareStack.use((0, middleware_serde_2.getSerdePlugin)(configuration, this.serialize, this.deserialize)); + this.middlewareStack.use((0, middleware_endpoint_3.getEndpointPlugin)(configuration, _RegisterClientCommand.getEndpointParameterInstructions())); + const stack = clientStack.concat(this.middlewareStack); + const { logger } = configuration; + const clientName = "SSOOIDCClient"; + const commandName = "RegisterClientCommand"; + const handlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: (_) => _, + outputFilterSensitiveLog: (_) => _ + }; + const { requestHandler } = configuration; + return stack.resolve((request) => requestHandler.handle(request.request, options || {}), handlerExecutionContext); + } + serialize(input, context) { + return se_RegisterClientCommand(input, context); + } + deserialize(output, context) { + return de_RegisterClientCommand(output, context); + } +}; +const middleware_endpoint_4 = __nccwpck_require__(82918); +const middleware_serde_3 = __nccwpck_require__(81238); +const smithy_client_10 = __nccwpck_require__(63570); +var StartDeviceAuthorizationCommand = class _StartDeviceAuthorizationCommand extends smithy_client_10.Command { + constructor(input) { + super(); + this.input = input; + } + static getEndpointParameterInstructions() { + return { + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" } + }; + } + resolveMiddleware(clientStack, configuration, options) { + this.middlewareStack.use((0, middleware_serde_3.getSerdePlugin)(configuration, this.serialize, this.deserialize)); + this.middlewareStack.use((0, middleware_endpoint_4.getEndpointPlugin)(configuration, _StartDeviceAuthorizationCommand.getEndpointParameterInstructions())); + const stack = clientStack.concat(this.middlewareStack); + const { logger } = configuration; + const clientName = "SSOOIDCClient"; + const commandName = "StartDeviceAuthorizationCommand"; + const handlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: (_) => _, + outputFilterSensitiveLog: (_) => _ + }; + const { requestHandler } = configuration; + return stack.resolve((request) => requestHandler.handle(request.request, options || {}), handlerExecutionContext); + } + serialize(input, context) { + return se_StartDeviceAuthorizationCommand(input, context); + } + deserialize(output, context) { + return de_StartDeviceAuthorizationCommand(output, context); + } +}; +var commands = { + CreateTokenCommand, + RegisterClientCommand, + StartDeviceAuthorizationCommand +}; +var SSOOIDC = class extends SSOOIDCClient { +}; +(0, smithy_client_5.createAggregatedClient)(commands, SSOOIDC); /***/ }), -/***/ 81308: -/***/ ((module, __unused_webpack_exports, 
__nccwpck_require__) => { +/***/ 92242: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['kinesisvideomedia'] = {}; -AWS.KinesisVideoMedia = Service.defineService('kinesisvideomedia', ['2017-09-30']); -Object.defineProperty(apiLoader.services['kinesisvideomedia'], '2017-09-30', { - get: function get() { - var model = __nccwpck_require__(18898); - model.paginators = (__nccwpck_require__(85061)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.KinesisVideoMedia; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.REFRESH_MESSAGE = exports.EXPIRE_WINDOW_MS = void 0; +exports.EXPIRE_WINDOW_MS = 5 * 60 * 1000; +exports.REFRESH_MESSAGE = `To refresh this SSO session run 'aws sso login' with the corresponding profile.`; /***/ }), -/***/ 12710: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 85125: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['kinesisvideosignalingchannels'] = {}; -AWS.KinesisVideoSignalingChannels = Service.defineService('kinesisvideosignalingchannels', ['2019-12-04']); -Object.defineProperty(apiLoader.services['kinesisvideosignalingchannels'], '2019-12-04', { - get: function get() { - var model = __nccwpck_require__(89769); - model.paginators = (__nccwpck_require__(41939)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.KinesisVideoSignalingChannels; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.fromSso = void 0; +const property_provider_1 = __nccwpck_require__(79721); +const shared_ini_file_loader_1 = __nccwpck_require__(43507); +const constants_1 = __nccwpck_require__(92242); +const getNewSsoOidcToken_1 = __nccwpck_require__(93601); +const validateTokenExpiry_1 = __nccwpck_require__(28418); +const validateTokenKey_1 = __nccwpck_require__(2488); +const writeSSOTokenToFile_1 = __nccwpck_require__(48552); +const lastRefreshAttemptTime = new Date(0); +const fromSso = (init = {}) => async () => { + const profiles = await (0, shared_ini_file_loader_1.parseKnownFiles)(init); + const profileName = (0, shared_ini_file_loader_1.getProfileName)(init); + const profile = profiles[profileName]; + if (!profile) { + throw new property_provider_1.TokenProviderError(`Profile '${profileName}' could not be found in shared credentials file.`, false); + } + else if (!profile["sso_session"]) { + throw new property_provider_1.TokenProviderError(`Profile '${profileName}' is missing required property 'sso_session'.`); + } + const ssoSessionName = profile["sso_session"]; + const ssoSessions = await (0, shared_ini_file_loader_1.loadSsoSessionData)(init); + const ssoSession = ssoSessions[ssoSessionName]; + if (!ssoSession) { + throw new property_provider_1.TokenProviderError(`Sso session '${ssoSessionName}' could not be found in shared credentials file.`, false); + } + for (const ssoSessionRequiredKey of ["sso_start_url", "sso_region"]) { + if (!ssoSession[ssoSessionRequiredKey]) { + throw new property_provider_1.TokenProviderError(`Sso session '${ssoSessionName}' is missing required property '${ssoSessionRequiredKey}'.`, false); + } + } + const ssoStartUrl = 
ssoSession["sso_start_url"]; + const ssoRegion = ssoSession["sso_region"]; + let ssoToken; + try { + ssoToken = await (0, shared_ini_file_loader_1.getSSOTokenFromFile)(ssoSessionName); + } + catch (e) { + throw new property_provider_1.TokenProviderError(`The SSO session token associated with profile=${profileName} was not found or is invalid. ${constants_1.REFRESH_MESSAGE}`, false); + } + (0, validateTokenKey_1.validateTokenKey)("accessToken", ssoToken.accessToken); + (0, validateTokenKey_1.validateTokenKey)("expiresAt", ssoToken.expiresAt); + const { accessToken, expiresAt } = ssoToken; + const existingToken = { token: accessToken, expiration: new Date(expiresAt) }; + if (existingToken.expiration.getTime() - Date.now() > constants_1.EXPIRE_WINDOW_MS) { + return existingToken; + } + if (Date.now() - lastRefreshAttemptTime.getTime() < 30 * 1000) { + (0, validateTokenExpiry_1.validateTokenExpiry)(existingToken); + return existingToken; + } + (0, validateTokenKey_1.validateTokenKey)("clientId", ssoToken.clientId, true); + (0, validateTokenKey_1.validateTokenKey)("clientSecret", ssoToken.clientSecret, true); + (0, validateTokenKey_1.validateTokenKey)("refreshToken", ssoToken.refreshToken, true); + try { + lastRefreshAttemptTime.setTime(Date.now()); + const newSsoOidcToken = await (0, getNewSsoOidcToken_1.getNewSsoOidcToken)(ssoToken, ssoRegion); + (0, validateTokenKey_1.validateTokenKey)("accessToken", newSsoOidcToken.accessToken); + (0, validateTokenKey_1.validateTokenKey)("expiresIn", newSsoOidcToken.expiresIn); + const newTokenExpiration = new Date(Date.now() + newSsoOidcToken.expiresIn * 1000); + try { + await (0, writeSSOTokenToFile_1.writeSSOTokenToFile)(ssoSessionName, { + ...ssoToken, + accessToken: newSsoOidcToken.accessToken, + expiresAt: newTokenExpiration.toISOString(), + refreshToken: newSsoOidcToken.refreshToken, + }); + } + catch (error) { + } + return { + token: newSsoOidcToken.accessToken, + expiration: newTokenExpiration, + }; + } + catch (error) { + (0, validateTokenExpiry_1.validateTokenExpiry)(existingToken); + return existingToken; + } +}; +exports.fromSso = fromSso; /***/ }), -/***/ 52642: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 63258: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['kinesisvideowebrtcstorage'] = {}; -AWS.KinesisVideoWebRTCStorage = Service.defineService('kinesisvideowebrtcstorage', ['2018-05-10']); -Object.defineProperty(apiLoader.services['kinesisvideowebrtcstorage'], '2018-05-10', { - get: function get() { - var model = __nccwpck_require__(62761); - model.paginators = (__nccwpck_require__(3540)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.KinesisVideoWebRTCStorage; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.fromStatic = void 0; +const property_provider_1 = __nccwpck_require__(79721); +const fromStatic = ({ token }) => async () => { + if (!token || !token.token) { + throw new property_provider_1.TokenProviderError(`Please pass a valid token to fromStatic`, false); + } + return token; +}; +exports.fromStatic = fromStatic; /***/ }), -/***/ 56782: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 93601: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = 
__nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['kms'] = {}; -AWS.KMS = Service.defineService('kms', ['2014-11-01']); -Object.defineProperty(apiLoader.services['kms'], '2014-11-01', { - get: function get() { - var model = __nccwpck_require__(1219); - model.paginators = (__nccwpck_require__(71402)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.KMS; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getNewSsoOidcToken = void 0; +const client_sso_oidc_node_1 = __nccwpck_require__(52664); +const getSsoOidcClient_1 = __nccwpck_require__(99775); +const getNewSsoOidcToken = (ssoToken, ssoRegion) => { + const ssoOidcClient = (0, getSsoOidcClient_1.getSsoOidcClient)(ssoRegion); + return ssoOidcClient.send(new client_sso_oidc_node_1.CreateTokenCommand({ + clientId: ssoToken.clientId, + clientSecret: ssoToken.clientSecret, + refreshToken: ssoToken.refreshToken, + grantType: "refresh_token", + })); +}; +exports.getNewSsoOidcToken = getNewSsoOidcToken; /***/ }), -/***/ 6726: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 99775: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['lakeformation'] = {}; -AWS.LakeFormation = Service.defineService('lakeformation', ['2017-03-31']); -Object.defineProperty(apiLoader.services['lakeformation'], '2017-03-31', { - get: function get() { - var model = __nccwpck_require__(82210); - model.paginators = (__nccwpck_require__(61488)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.LakeFormation; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getSsoOidcClient = void 0; +const client_sso_oidc_node_1 = __nccwpck_require__(52664); +const ssoOidcClientsHash = {}; +const getSsoOidcClient = (ssoRegion) => { + if (ssoOidcClientsHash[ssoRegion]) { + return ssoOidcClientsHash[ssoRegion]; + } + const ssoOidcClient = new client_sso_oidc_node_1.SSOOIDCClient({ region: ssoRegion }); + ssoOidcClientsHash[ssoRegion] = ssoOidcClient; + return ssoOidcClient; +}; +exports.getSsoOidcClient = getSsoOidcClient; /***/ }), -/***/ 13321: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 52843: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['lambda'] = {}; -AWS.Lambda = Service.defineService('lambda', ['2014-11-11', '2015-03-31']); -__nccwpck_require__(8452); -Object.defineProperty(apiLoader.services['lambda'], '2014-11-11', { - get: function get() { - var model = __nccwpck_require__(91251); - model.paginators = (__nccwpck_require__(79210)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); -Object.defineProperty(apiLoader.services['lambda'], '2015-03-31', { - get: function get() { - var model = __nccwpck_require__(29103); - model.paginators = (__nccwpck_require__(32057)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(40626)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Lambda; +Object.defineProperty(exports, 
"__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(52664), exports); +tslib_1.__exportStar(__nccwpck_require__(85125), exports); +tslib_1.__exportStar(__nccwpck_require__(63258), exports); +tslib_1.__exportStar(__nccwpck_require__(70195), exports); /***/ }), -/***/ 37397: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 70195: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['lexmodelbuildingservice'] = {}; -AWS.LexModelBuildingService = Service.defineService('lexmodelbuildingservice', ['2017-04-19']); -Object.defineProperty(apiLoader.services['lexmodelbuildingservice'], '2017-04-19', { - get: function get() { - var model = __nccwpck_require__(96327); - model.paginators = (__nccwpck_require__(12348)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.LexModelBuildingService; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.nodeProvider = void 0; +const property_provider_1 = __nccwpck_require__(79721); +const fromSso_1 = __nccwpck_require__(85125); +const nodeProvider = (init = {}) => (0, property_provider_1.memoize)((0, property_provider_1.chain)((0, fromSso_1.fromSso)(init), async () => { + throw new property_provider_1.TokenProviderError("Could not load token from any providers", false); +}), (token) => token.expiration !== undefined && token.expiration.getTime() - Date.now() < 300000, (token) => token.expiration !== undefined); +exports.nodeProvider = nodeProvider; /***/ }), -/***/ 27254: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 28418: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['lexmodelsv2'] = {}; -AWS.LexModelsV2 = Service.defineService('lexmodelsv2', ['2020-08-07']); -Object.defineProperty(apiLoader.services['lexmodelsv2'], '2020-08-07', { - get: function get() { - var model = __nccwpck_require__(98781); - model.paginators = (__nccwpck_require__(49461)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(55520)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.LexModelsV2; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.validateTokenExpiry = void 0; +const property_provider_1 = __nccwpck_require__(79721); +const constants_1 = __nccwpck_require__(92242); +const validateTokenExpiry = (token) => { + if (token.expiration && token.expiration.getTime() < Date.now()) { + throw new property_provider_1.TokenProviderError(`Token is expired. 
${constants_1.REFRESH_MESSAGE}`, false); + } +}; +exports.validateTokenExpiry = validateTokenExpiry; /***/ }), -/***/ 62716: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 2488: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['lexruntime'] = {}; -AWS.LexRuntime = Service.defineService('lexruntime', ['2016-11-28']); -Object.defineProperty(apiLoader.services['lexruntime'], '2016-11-28', { - get: function get() { - var model = __nccwpck_require__(11059); - model.paginators = (__nccwpck_require__(97715)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.LexRuntime; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.validateTokenKey = void 0; +const property_provider_1 = __nccwpck_require__(79721); +const constants_1 = __nccwpck_require__(92242); +const validateTokenKey = (key, value, forRefresh = false) => { + if (typeof value === "undefined") { + throw new property_provider_1.TokenProviderError(`Value not present for '${key}' in SSO Token${forRefresh ? ". Cannot refresh" : ""}. ${constants_1.REFRESH_MESSAGE}`, false); + } +}; +exports.validateTokenKey = validateTokenKey; /***/ }), -/***/ 33855: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 48552: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['lexruntimev2'] = {}; -AWS.LexRuntimeV2 = Service.defineService('lexruntimev2', ['2020-08-07']); -Object.defineProperty(apiLoader.services['lexruntimev2'], '2020-08-07', { - get: function get() { - var model = __nccwpck_require__(17908); - model.paginators = (__nccwpck_require__(469)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.LexRuntimeV2; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.writeSSOTokenToFile = void 0; +const shared_ini_file_loader_1 = __nccwpck_require__(43507); +const fs_1 = __nccwpck_require__(57147); +const { writeFile } = fs_1.promises; +const writeSSOTokenToFile = (id, ssoToken) => { + const tokenFilepath = (0, shared_ini_file_loader_1.getSSOTokenFilepath)(id); + const tokenString = JSON.stringify(ssoToken, null, 2); + return writeFile(tokenFilepath, tokenString); +}; +exports.writeSSOTokenToFile = writeSSOTokenToFile; /***/ }), -/***/ 34693: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 52562: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['licensemanager'] = {}; -AWS.LicenseManager = Service.defineService('licensemanager', ['2018-08-01']); -Object.defineProperty(apiLoader.services['licensemanager'], '2018-08-01', { - get: function get() { - var model = __nccwpck_require__(19160); - model.paginators = (__nccwpck_require__(77552)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.LicenseManager; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 52687: -/***/ ((module, __unused_webpack_exports, 
__nccwpck_require__) => { +/***/ 26913: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['licensemanagerlinuxsubscriptions'] = {}; -AWS.LicenseManagerLinuxSubscriptions = Service.defineService('licensemanagerlinuxsubscriptions', ['2018-05-10']); -Object.defineProperty(apiLoader.services['licensemanagerlinuxsubscriptions'], '2018-05-10', { - get: function get() { - var model = __nccwpck_require__(94260); - model.paginators = (__nccwpck_require__(60467)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.LicenseManagerLinuxSubscriptions; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.HttpAuthLocation = void 0; +var types_1 = __nccwpck_require__(55756); +Object.defineProperty(exports, "HttpAuthLocation", ({ enumerable: true, get: function () { return types_1.HttpAuthLocation; } })); /***/ }), -/***/ 37725: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 14994: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['licensemanagerusersubscriptions'] = {}; -AWS.LicenseManagerUserSubscriptions = Service.defineService('licensemanagerusersubscriptions', ['2018-05-10']); -Object.defineProperty(apiLoader.services['licensemanagerusersubscriptions'], '2018-05-10', { - get: function get() { - var model = __nccwpck_require__(48338); - model.paginators = (__nccwpck_require__(84416)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.LicenseManagerUserSubscriptions; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 22718: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 65861: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['lightsail'] = {}; -AWS.Lightsail = Service.defineService('lightsail', ['2016-11-28']); -Object.defineProperty(apiLoader.services['lightsail'], '2016-11-28', { - get: function get() { - var model = __nccwpck_require__(94784); - model.paginators = (__nccwpck_require__(17528)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Lightsail; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 44594: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 76527: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['location'] = {}; -AWS.Location = Service.defineService('location', ['2020-11-19']); -Object.defineProperty(apiLoader.services['location'], '2020-11-19', { - get: function get() { - var model = __nccwpck_require__(79257); - model.paginators = (__nccwpck_require__(53350)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Location; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 21843: -/***/ 
((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 48470: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['lookoutequipment'] = {}; -AWS.LookoutEquipment = Service.defineService('lookoutequipment', ['2020-12-15']); -Object.defineProperty(apiLoader.services['lookoutequipment'], '2020-12-15', { - get: function get() { - var model = __nccwpck_require__(50969); - model.paginators = (__nccwpck_require__(92858)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.LookoutEquipment; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 78708: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 28045: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['lookoutmetrics'] = {}; -AWS.LookoutMetrics = Service.defineService('lookoutmetrics', ['2017-07-25']); -Object.defineProperty(apiLoader.services['lookoutmetrics'], '2017-07-25', { - get: function get() { - var model = __nccwpck_require__(37749); - model.paginators = (__nccwpck_require__(13366)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.LookoutMetrics; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 65046: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 67736: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['lookoutvision'] = {}; -AWS.LookoutVision = Service.defineService('lookoutvision', ['2020-11-20']); -Object.defineProperty(apiLoader.services['lookoutvision'], '2020-11-20', { - get: function get() { - var model = __nccwpck_require__(15110); - model.paginators = (__nccwpck_require__(45644)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.LookoutVision; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 22482: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 13268: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['m2'] = {}; -AWS.M2 = Service.defineService('m2', ['2021-04-28']); -Object.defineProperty(apiLoader.services['m2'], '2021-04-28', { - get: function get() { - var model = __nccwpck_require__(21363); - model.paginators = (__nccwpck_require__(96286)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.M2; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 82907: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 90142: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['machinelearning'] = {}; -AWS.MachineLearning = Service.defineService('machinelearning', 
['2014-12-12']); -__nccwpck_require__(19174); -Object.defineProperty(apiLoader.services['machinelearning'], '2014-12-12', { - get: function get() { - var model = __nccwpck_require__(4069); - model.paginators = (__nccwpck_require__(95535)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(23194)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.MachineLearning; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.HostAddressType = void 0; +var HostAddressType; +(function (HostAddressType) { + HostAddressType["AAAA"] = "AAAA"; + HostAddressType["A"] = "A"; +})(HostAddressType = exports.HostAddressType || (exports.HostAddressType = {})); /***/ }), -/***/ 86427: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 62338: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['macie'] = {}; -AWS.Macie = Service.defineService('macie', ['2017-12-19']); -Object.defineProperty(apiLoader.services['macie'], '2017-12-19', { - get: function get() { - var model = __nccwpck_require__(99366); - model.paginators = (__nccwpck_require__(34091)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Macie; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 57330: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 99385: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['macie2'] = {}; -AWS.Macie2 = Service.defineService('macie2', ['2020-01-01']); -Object.defineProperty(apiLoader.services['macie2'], '2020-01-01', { - get: function get() { - var model = __nccwpck_require__(50847); - model.paginators = (__nccwpck_require__(25947)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(71131)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Macie2; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.EndpointURLScheme = void 0; +var types_1 = __nccwpck_require__(55756); +Object.defineProperty(exports, "EndpointURLScheme", ({ enumerable: true, get: function () { return types_1.EndpointURLScheme; } })); /***/ }), -/***/ 85143: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 37521: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['managedblockchain'] = {}; -AWS.ManagedBlockchain = Service.defineService('managedblockchain', ['2018-09-24']); -Object.defineProperty(apiLoader.services['managedblockchain'], '2018-09-24', { - get: function get() { - var model = __nccwpck_require__(31229); - model.paginators = (__nccwpck_require__(57358)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.ManagedBlockchain; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 2609: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 61393: +/***/ ((__unused_webpack_module, exports) => 
{ -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['marketplacecatalog'] = {}; -AWS.MarketplaceCatalog = Service.defineService('marketplacecatalog', ['2018-09-17']); -Object.defineProperty(apiLoader.services['marketplacecatalog'], '2018-09-17', { - get: function get() { - var model = __nccwpck_require__(87122); - model.paginators = (__nccwpck_require__(30187)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.MarketplaceCatalog; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 4540: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 51821: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['marketplacecommerceanalytics'] = {}; -AWS.MarketplaceCommerceAnalytics = Service.defineService('marketplacecommerceanalytics', ['2015-07-01']); -Object.defineProperty(apiLoader.services['marketplacecommerceanalytics'], '2015-07-01', { - get: function get() { - var model = __nccwpck_require__(96696); - model.paginators = (__nccwpck_require__(43265)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.MarketplaceCommerceAnalytics; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 53707: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 92635: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['marketplaceentitlementservice'] = {}; -AWS.MarketplaceEntitlementService = Service.defineService('marketplaceentitlementservice', ['2017-01-11']); -Object.defineProperty(apiLoader.services['marketplaceentitlementservice'], '2017-01-11', { - get: function get() { - var model = __nccwpck_require__(64253); - model.paginators = (__nccwpck_require__(67012)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.MarketplaceEntitlementService; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 39297: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 71301: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['marketplacemetering'] = {}; -AWS.MarketplaceMetering = Service.defineService('marketplacemetering', ['2016-01-14']); -Object.defineProperty(apiLoader.services['marketplacemetering'], '2016-01-14', { - get: function get() { - var model = __nccwpck_require__(43027); - model.paginators = (__nccwpck_require__(4843)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.MarketplaceMetering; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 67639: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 21268: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - 
-apiLoader.services['mediaconnect'] = {}; -AWS.MediaConnect = Service.defineService('mediaconnect', ['2018-11-14']); -Object.defineProperty(apiLoader.services['mediaconnect'], '2018-11-14', { - get: function get() { - var model = __nccwpck_require__(85245); - model.paginators = (__nccwpck_require__(68160)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(42876)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.MediaConnect; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 57220: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 7192: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['mediaconvert'] = {}; -AWS.MediaConvert = Service.defineService('mediaconvert', ['2017-08-29']); -Object.defineProperty(apiLoader.services['mediaconvert'], '2017-08-29', { - get: function get() { - var model = __nccwpck_require__(41924); - model.paginators = (__nccwpck_require__(14179)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.MediaConvert; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 7509: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 10640: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['medialive'] = {}; -AWS.MediaLive = Service.defineService('medialive', ['2017-10-14']); -Object.defineProperty(apiLoader.services['medialive'], '2017-10-14', { - get: function get() { - var model = __nccwpck_require__(32326); - model.paginators = (__nccwpck_require__(84652)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(17259)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.MediaLive; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(51821), exports); +tslib_1.__exportStar(__nccwpck_require__(92635), exports); +tslib_1.__exportStar(__nccwpck_require__(71301), exports); +tslib_1.__exportStar(__nccwpck_require__(21268), exports); +tslib_1.__exportStar(__nccwpck_require__(7192), exports); /***/ }), -/***/ 91620: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 89029: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['mediapackage'] = {}; -AWS.MediaPackage = Service.defineService('mediapackage', ['2017-10-12']); -Object.defineProperty(apiLoader.services['mediapackage'], '2017-10-12', { - get: function get() { - var model = __nccwpck_require__(51261); - model.paginators = (__nccwpck_require__(48933)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(52562), exports); +tslib_1.__exportStar(__nccwpck_require__(26913), exports); 
+tslib_1.__exportStar(__nccwpck_require__(14994), exports); +tslib_1.__exportStar(__nccwpck_require__(65861), exports); +tslib_1.__exportStar(__nccwpck_require__(76527), exports); +tslib_1.__exportStar(__nccwpck_require__(48470), exports); +tslib_1.__exportStar(__nccwpck_require__(28045), exports); +tslib_1.__exportStar(__nccwpck_require__(67736), exports); +tslib_1.__exportStar(__nccwpck_require__(13268), exports); +tslib_1.__exportStar(__nccwpck_require__(90142), exports); +tslib_1.__exportStar(__nccwpck_require__(62338), exports); +tslib_1.__exportStar(__nccwpck_require__(99385), exports); +tslib_1.__exportStar(__nccwpck_require__(37521), exports); +tslib_1.__exportStar(__nccwpck_require__(61393), exports); +tslib_1.__exportStar(__nccwpck_require__(10640), exports); +tslib_1.__exportStar(__nccwpck_require__(89910), exports); +tslib_1.__exportStar(__nccwpck_require__(36678), exports); +tslib_1.__exportStar(__nccwpck_require__(39931), exports); +tslib_1.__exportStar(__nccwpck_require__(42620), exports); +tslib_1.__exportStar(__nccwpck_require__(89062), exports); +tslib_1.__exportStar(__nccwpck_require__(89546), exports); +tslib_1.__exportStar(__nccwpck_require__(80316), exports); +tslib_1.__exportStar(__nccwpck_require__(57835), exports); +tslib_1.__exportStar(__nccwpck_require__(91678), exports); +tslib_1.__exportStar(__nccwpck_require__(93818), exports); +tslib_1.__exportStar(__nccwpck_require__(51991), exports); +tslib_1.__exportStar(__nccwpck_require__(24296), exports); +tslib_1.__exportStar(__nccwpck_require__(59416), exports); +tslib_1.__exportStar(__nccwpck_require__(92772), exports); +tslib_1.__exportStar(__nccwpck_require__(20134), exports); +tslib_1.__exportStar(__nccwpck_require__(34465), exports); + + +/***/ }), + +/***/ 89910: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; -module.exports = AWS.MediaPackage; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 53264: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 36678: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['mediapackagev2'] = {}; -AWS.MediaPackageV2 = Service.defineService('mediapackagev2', ['2022-12-25']); -Object.defineProperty(apiLoader.services['mediapackagev2'], '2022-12-25', { - get: function get() { - var model = __nccwpck_require__(37594); - model.paginators = (__nccwpck_require__(44503)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(68906)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.MediaPackageV2; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 14962: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 39931: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['mediapackagevod'] = {}; -AWS.MediaPackageVod = Service.defineService('mediapackagevod', ['2018-11-07']); -Object.defineProperty(apiLoader.services['mediapackagevod'], '2018-11-07', { - get: function get() { - var model = __nccwpck_require__(98877); - model.paginators = (__nccwpck_require__(48422)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = 
AWS.MediaPackageVod; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 83748: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 42620: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['mediastore'] = {}; -AWS.MediaStore = Service.defineService('mediastore', ['2017-09-01']); -Object.defineProperty(apiLoader.services['mediastore'], '2017-09-01', { - get: function get() { - var model = __nccwpck_require__(68901); - model.paginators = (__nccwpck_require__(5848)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.MediaStore; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 98703: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 89062: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['mediastoredata'] = {}; -AWS.MediaStoreData = Service.defineService('mediastoredata', ['2017-09-01']); -Object.defineProperty(apiLoader.services['mediastoredata'], '2017-09-01', { - get: function get() { - var model = __nccwpck_require__(55081); - model.paginators = (__nccwpck_require__(97948)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.MediaStoreData; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 99658: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 89546: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['mediatailor'] = {}; -AWS.MediaTailor = Service.defineService('mediatailor', ['2018-04-23']); -Object.defineProperty(apiLoader.services['mediatailor'], '2018-04-23', { - get: function get() { - var model = __nccwpck_require__(77511); - model.paginators = (__nccwpck_require__(68557)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.MediaTailor; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 50782: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 80316: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['memorydb'] = {}; -AWS.MemoryDB = Service.defineService('memorydb', ['2021-01-01']); -Object.defineProperty(apiLoader.services['memorydb'], '2021-01-01', { - get: function get() { - var model = __nccwpck_require__(51950); - model.paginators = (__nccwpck_require__(93809)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.MemoryDB; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 41339: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 57835: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - 
-apiLoader.services['mgn'] = {}; -AWS.Mgn = Service.defineService('mgn', ['2020-02-26']); -Object.defineProperty(apiLoader.services['mgn'], '2020-02-26', { - get: function get() { - var model = __nccwpck_require__(65811); - model.paginators = (__nccwpck_require__(52443)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Mgn; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 14688: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 91678: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['migrationhub'] = {}; -AWS.MigrationHub = Service.defineService('migrationhub', ['2017-05-31']); -Object.defineProperty(apiLoader.services['migrationhub'], '2017-05-31', { - get: function get() { - var model = __nccwpck_require__(99161); - model.paginators = (__nccwpck_require__(27903)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.MigrationHub; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 62658: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 93818: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['migrationhubconfig'] = {}; -AWS.MigrationHubConfig = Service.defineService('migrationhubconfig', ['2019-06-30']); -Object.defineProperty(apiLoader.services['migrationhubconfig'], '2019-06-30', { - get: function get() { - var model = __nccwpck_require__(59734); - model.paginators = (__nccwpck_require__(51497)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.MigrationHubConfig; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 66120: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 51991: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['migrationhuborchestrator'] = {}; -AWS.MigrationHubOrchestrator = Service.defineService('migrationhuborchestrator', ['2021-08-28']); -Object.defineProperty(apiLoader.services['migrationhuborchestrator'], '2021-08-28', { - get: function get() { - var model = __nccwpck_require__(73093); - model.paginators = (__nccwpck_require__(24233)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(83173)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.MigrationHubOrchestrator; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 2925: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 24296: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['migrationhubrefactorspaces'] = {}; -AWS.MigrationHubRefactorSpaces = Service.defineService('migrationhubrefactorspaces', ['2021-10-26']); -Object.defineProperty(apiLoader.services['migrationhubrefactorspaces'], '2021-10-26', { - 
get: function get() { - var model = __nccwpck_require__(17110); - model.paginators = (__nccwpck_require__(63789)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.MigrationHubRefactorSpaces; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 96533: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 59416: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['migrationhubstrategy'] = {}; -AWS.MigrationHubStrategy = Service.defineService('migrationhubstrategy', ['2020-02-19']); -Object.defineProperty(apiLoader.services['migrationhubstrategy'], '2020-02-19', { - get: function get() { - var model = __nccwpck_require__(64663); - model.paginators = (__nccwpck_require__(30896)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.MigrationHubStrategy; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.RequestHandlerProtocol = void 0; +var types_1 = __nccwpck_require__(55756); +Object.defineProperty(exports, "RequestHandlerProtocol", ({ enumerable: true, get: function () { return types_1.RequestHandlerProtocol; } })); /***/ }), -/***/ 39782: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 92772: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['mobile'] = {}; -AWS.Mobile = Service.defineService('mobile', ['2017-07-01']); -Object.defineProperty(apiLoader.services['mobile'], '2017-07-01', { - get: function get() { - var model = __nccwpck_require__(51691); - model.paginators = (__nccwpck_require__(43522)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Mobile; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 66690: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 20134: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['mobileanalytics'] = {}; -AWS.MobileAnalytics = Service.defineService('mobileanalytics', ['2014-06-05']); -Object.defineProperty(apiLoader.services['mobileanalytics'], '2014-06-05', { - get: function get() { - var model = __nccwpck_require__(90338); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.MobileAnalytics; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 23093: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 34465: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['mq'] = {}; -AWS.MQ = Service.defineService('mq', ['2017-11-27']); -Object.defineProperty(apiLoader.services['mq'], '2017-11-27', { - get: function get() { - var model = __nccwpck_require__(35102); - model.paginators = (__nccwpck_require__(46095)/* .pagination */ .o); - return model; - }, - enumerable: true, - 
configurable: true -}); +"use strict"; -module.exports = AWS.MQ; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 79954: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 81809: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['mturk'] = {}; -AWS.MTurk = Service.defineService('mturk', ['2017-01-17']); -Object.defineProperty(apiLoader.services['mturk'], '2017-01-17', { - get: function get() { - var model = __nccwpck_require__(73064); - model.paginators = (__nccwpck_require__(42409)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.MTurk; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.debugId = void 0; +exports.debugId = "endpoints"; /***/ }), -/***/ 32712: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 27617: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['mwaa'] = {}; -AWS.MWAA = Service.defineService('mwaa', ['2020-07-01']); -Object.defineProperty(apiLoader.services['mwaa'], '2020-07-01', { - get: function get() { - var model = __nccwpck_require__(56612); - model.paginators = (__nccwpck_require__(11793)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.MWAA; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(81809), exports); +tslib_1.__exportStar(__nccwpck_require__(46833), exports); /***/ }), -/***/ 30047: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 46833: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['neptune'] = {}; -AWS.Neptune = Service.defineService('neptune', ['2014-10-31']); -__nccwpck_require__(73090); -Object.defineProperty(apiLoader.services['neptune'], '2014-10-31', { - get: function get() { - var model = __nccwpck_require__(50018); - model.paginators = (__nccwpck_require__(62952)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(8127)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Neptune; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.toDebugString = void 0; +function toDebugString(input) { + if (typeof input !== "object" || input == null) { + return input; + } + if ("ref" in input) { + return `$${toDebugString(input.ref)}`; + } + if ("fn" in input) { + return `${input.fn}(${(input.argv || []).map(toDebugString).join(", ")})`; + } + return JSON.stringify(input, null, 2); +} +exports.toDebugString = toDebugString; /***/ }), -/***/ 84626: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 13350: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['networkfirewall'] = {}; -AWS.NetworkFirewall = 
Service.defineService('networkfirewall', ['2020-11-12']); -Object.defineProperty(apiLoader.services['networkfirewall'], '2020-11-12', { - get: function get() { - var model = __nccwpck_require__(63757); - model.paginators = (__nccwpck_require__(74798)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.NetworkFirewall; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(37482), exports); +tslib_1.__exportStar(__nccwpck_require__(73442), exports); +tslib_1.__exportStar(__nccwpck_require__(36563), exports); +tslib_1.__exportStar(__nccwpck_require__(57433), exports); /***/ }), -/***/ 37610: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 46835: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['networkmanager'] = {}; -AWS.NetworkManager = Service.defineService('networkmanager', ['2019-07-05']); -Object.defineProperty(apiLoader.services['networkmanager'], '2019-07-05', { - get: function get() { - var model = __nccwpck_require__(10151); - model.paginators = (__nccwpck_require__(68278)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.NetworkManager; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(48079), exports); +tslib_1.__exportStar(__nccwpck_require__(34711), exports); +tslib_1.__exportStar(__nccwpck_require__(37482), exports); /***/ }), -/***/ 89428: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 48079: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['nimble'] = {}; -AWS.Nimble = Service.defineService('nimble', ['2020-08-01']); -Object.defineProperty(apiLoader.services['nimble'], '2020-08-01', { - get: function get() { - var model = __nccwpck_require__(50605); - model.paginators = (__nccwpck_require__(65300)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(42486)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Nimble; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isVirtualHostableS3Bucket = void 0; +const isIpAddress_1 = __nccwpck_require__(73442); +const isValidHostLabel_1 = __nccwpck_require__(57373); +const isVirtualHostableS3Bucket = (value, allowSubDomains = false) => { + if (allowSubDomains) { + for (const label of value.split(".")) { + if (!(0, exports.isVirtualHostableS3Bucket)(label)) { + return false; + } + } + return true; + } + if (!(0, isValidHostLabel_1.isValidHostLabel)(value)) { + return false; + } + if (value.length < 3 || value.length > 63) { + return false; + } + if (value !== value.toLowerCase()) { + return false; + } + if ((0, isIpAddress_1.isIpAddress)(value)) { + return false; + } + return true; +}; +exports.isVirtualHostableS3Bucket = isVirtualHostableS3Bucket; /***/ }), -/***/ 9319: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 34711: +/***/ ((__unused_webpack_module, exports) => { 
-__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['oam'] = {}; -AWS.OAM = Service.defineService('oam', ['2022-06-10']); -Object.defineProperty(apiLoader.services['oam'], '2022-06-10', { - get: function get() { - var model = __nccwpck_require__(13463); - model.paginators = (__nccwpck_require__(55717)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.OAM; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.parseArn = void 0; +const parseArn = (value) => { + const segments = value.split(":"); + if (segments.length < 6) + return null; + const [arn, partition, service, region, accountId, ...resourceId] = segments; + if (arn !== "arn" || partition === "" || service === "" || resourceId[0] === "") + return null; + return { + partition, + service, + region, + accountId, + resourceId: resourceId[0].includes("/") ? resourceId[0].split("/") : resourceId, + }; +}; +exports.parseArn = parseArn; /***/ }), -/***/ 75114: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 37482: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['omics'] = {}; -AWS.Omics = Service.defineService('omics', ['2022-11-28']); -Object.defineProperty(apiLoader.services['omics'], '2022-11-28', { - get: function get() { - var model = __nccwpck_require__(74258); - model.paginators = (__nccwpck_require__(78278)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(31165)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Omics; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getUserAgentPrefix = exports.useDefaultPartitionInfo = exports.setPartitionInfo = exports.partition = void 0; +const tslib_1 = __nccwpck_require__(4351); +const partitions_json_1 = tslib_1.__importDefault(__nccwpck_require__(95367)); +let selectedPartitionsInfo = partitions_json_1.default; +let selectedUserAgentPrefix = ""; +const partition = (value) => { + const { partitions } = selectedPartitionsInfo; + for (const partition of partitions) { + const { regions, outputs } = partition; + for (const [region, regionData] of Object.entries(regions)) { + if (region === value) { + return { + ...outputs, + ...regionData, + }; + } + } + } + for (const partition of partitions) { + const { regionRegex, outputs } = partition; + if (new RegExp(regionRegex).test(value)) { + return { + ...outputs, + }; + } + } + const DEFAULT_PARTITION = partitions.find((partition) => partition.id === "aws"); + if (!DEFAULT_PARTITION) { + throw new Error("Provided region was not found in the partition array or regex," + + " and default partition with id 'aws' doesn't exist."); + } + return { + ...DEFAULT_PARTITION.outputs, + }; +}; +exports.partition = partition; +const setPartitionInfo = (partitionsInfo, userAgentPrefix = "") => { + selectedPartitionsInfo = partitionsInfo; + selectedUserAgentPrefix = userAgentPrefix; +}; +exports.setPartitionInfo = setPartitionInfo; +const useDefaultPartitionInfo = () => { + (0, exports.setPartitionInfo)(partitions_json_1.default, ""); +}; +exports.useDefaultPartitionInfo = useDefaultPartitionInfo; +const getUserAgentPrefix = () => selectedUserAgentPrefix; 
+exports.getUserAgentPrefix = getUserAgentPrefix; /***/ }), -/***/ 60358: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 55370: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['opensearch'] = {}; -AWS.OpenSearch = Service.defineService('opensearch', ['2021-01-01']); -Object.defineProperty(apiLoader.services['opensearch'], '2021-01-01', { - get: function get() { - var model = __nccwpck_require__(90583); - model.paginators = (__nccwpck_require__(32668)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.OpenSearch; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.booleanEquals = void 0; +const booleanEquals = (value1, value2) => value1 === value2; +exports.booleanEquals = booleanEquals; /***/ }), -/***/ 86277: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 20767: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['opensearchserverless'] = {}; -AWS.OpenSearchServerless = Service.defineService('opensearchserverless', ['2021-11-01']); -Object.defineProperty(apiLoader.services['opensearchserverless'], '2021-11-01', { - get: function get() { - var model = __nccwpck_require__(61668); - model.paginators = (__nccwpck_require__(68785)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.OpenSearchServerless; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getAttr = void 0; +const types_1 = __nccwpck_require__(57433); +const getAttrPathList_1 = __nccwpck_require__(81844); +const getAttr = (value, path) => (0, getAttrPathList_1.getAttrPathList)(path).reduce((acc, index) => { + if (typeof acc !== "object") { + throw new types_1.EndpointError(`Index '${index}' in '${path}' not found in '${JSON.stringify(value)}'`); + } + else if (Array.isArray(acc)) { + return acc[parseInt(index)]; + } + return acc[index]; +}, value); +exports.getAttr = getAttr; /***/ }), -/***/ 75691: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 81844: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['opsworks'] = {}; -AWS.OpsWorks = Service.defineService('opsworks', ['2013-02-18']); -Object.defineProperty(apiLoader.services['opsworks'], '2013-02-18', { - get: function get() { - var model = __nccwpck_require__(22805); - model.paginators = (__nccwpck_require__(24750)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(74961)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.OpsWorks; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getAttrPathList = void 0; +const types_1 = __nccwpck_require__(57433); +const getAttrPathList = (path) => { + const parts = path.split("."); + const pathList = []; + for (const part of parts) { + const squareBracketIndex = part.indexOf("["); + if (squareBracketIndex !== -1) { + if (part.indexOf("]") !== part.length - 1) { + throw new 
types_1.EndpointError(`Path: '${path}' does not end with ']'`); + } + const arrayIndex = part.slice(squareBracketIndex + 1, -1); + if (Number.isNaN(parseInt(arrayIndex))) { + throw new types_1.EndpointError(`Invalid array index: '${arrayIndex}' in path: '${path}'`); + } + if (squareBracketIndex !== 0) { + pathList.push(part.slice(0, squareBracketIndex)); + } + pathList.push(arrayIndex); + } + else { + pathList.push(part); + } + } + return pathList; +}; +exports.getAttrPathList = getAttrPathList; /***/ }), -/***/ 80388: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 83188: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['opsworkscm'] = {}; -AWS.OpsWorksCM = Service.defineService('opsworkscm', ['2016-11-01']); -Object.defineProperty(apiLoader.services['opsworkscm'], '2016-11-01', { - get: function get() { - var model = __nccwpck_require__(56705); - model.paginators = (__nccwpck_require__(49463)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(65003)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.OpsWorksCM; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.aws = void 0; +const tslib_1 = __nccwpck_require__(4351); +exports.aws = tslib_1.__importStar(__nccwpck_require__(46835)); +tslib_1.__exportStar(__nccwpck_require__(55370), exports); +tslib_1.__exportStar(__nccwpck_require__(20767), exports); +tslib_1.__exportStar(__nccwpck_require__(78816), exports); +tslib_1.__exportStar(__nccwpck_require__(57373), exports); +tslib_1.__exportStar(__nccwpck_require__(29692), exports); +tslib_1.__exportStar(__nccwpck_require__(22780), exports); +tslib_1.__exportStar(__nccwpck_require__(55182), exports); +tslib_1.__exportStar(__nccwpck_require__(48305), exports); +tslib_1.__exportStar(__nccwpck_require__(6535), exports); /***/ }), -/***/ 44670: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 73442: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['organizations'] = {}; -AWS.Organizations = Service.defineService('organizations', ['2016-11-28']); -Object.defineProperty(apiLoader.services['organizations'], '2016-11-28', { - get: function get() { - var model = __nccwpck_require__(58874); - model.paginators = (__nccwpck_require__(43261)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Organizations; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isIpAddress = void 0; +const IP_V4_REGEX = new RegExp(`^(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)(?:\\.(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)){3}$`); +const isIpAddress = (value) => IP_V4_REGEX.test(value) || (value.startsWith("[") && value.endsWith("]")); +exports.isIpAddress = isIpAddress; /***/ }), -/***/ 98021: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 78816: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['osis'] = {}; -AWS.OSIS = Service.defineService('osis', ['2022-01-01']); 
-Object.defineProperty(apiLoader.services['osis'], '2022-01-01', { - get: function get() { - var model = __nccwpck_require__(51838); - model.paginators = (__nccwpck_require__(72472)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.OSIS; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isSet = void 0; +const isSet = (value) => value != null; +exports.isSet = isSet; /***/ }), -/***/ 27551: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 57373: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['outposts'] = {}; -AWS.Outposts = Service.defineService('outposts', ['2019-12-03']); -Object.defineProperty(apiLoader.services['outposts'], '2019-12-03', { - get: function get() { - var model = __nccwpck_require__(4807); - model.paginators = (__nccwpck_require__(3364)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Outposts; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isValidHostLabel = void 0; +const VALID_HOST_LABEL_REGEX = new RegExp(`^(?!.*-$)(?!-)[a-zA-Z0-9-]{1,63}$`); +const isValidHostLabel = (value, allowSubDomains = false) => { + if (!allowSubDomains) { + return VALID_HOST_LABEL_REGEX.test(value); + } + const labels = value.split("."); + for (const label of labels) { + if (!(0, exports.isValidHostLabel)(label)) { + return false; + } + } + return true; +}; +exports.isValidHostLabel = isValidHostLabel; /***/ }), -/***/ 20368: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 29692: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['panorama'] = {}; -AWS.Panorama = Service.defineService('panorama', ['2019-07-24']); -Object.defineProperty(apiLoader.services['panorama'], '2019-07-24', { - get: function get() { - var model = __nccwpck_require__(91489); - model.paginators = (__nccwpck_require__(77238)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Panorama; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.not = void 0; +const not = (value) => !value; +exports.not = not; /***/ }), -/***/ 11594: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 22780: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['paymentcryptography'] = {}; -AWS.PaymentCryptography = Service.defineService('paymentcryptography', ['2021-09-14']); -Object.defineProperty(apiLoader.services['paymentcryptography'], '2021-09-14', { - get: function get() { - var model = __nccwpck_require__(86072); - model.paginators = (__nccwpck_require__(17819)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.PaymentCryptography; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.parseURL = void 0; +const types_1 = __nccwpck_require__(89029); +const isIpAddress_1 = __nccwpck_require__(73442); +const 
DEFAULT_PORTS = { + [types_1.EndpointURLScheme.HTTP]: 80, + [types_1.EndpointURLScheme.HTTPS]: 443, +}; +const parseURL = (value) => { + const whatwgURL = (() => { + try { + if (value instanceof URL) { + return value; + } + if (typeof value === "object" && "hostname" in value) { + const { hostname, port, protocol = "", path = "", query = {} } = value; + const url = new URL(`${protocol}//${hostname}${port ? `:${port}` : ""}${path}`); + url.search = Object.entries(query) + .map(([k, v]) => `${k}=${v}`) + .join("&"); + return url; + } + return new URL(value); + } + catch (error) { + return null; + } + })(); + if (!whatwgURL) { + console.error(`Unable to parse ${JSON.stringify(value)} as a whatwg URL.`); + return null; + } + const urlString = whatwgURL.href; + const { host, hostname, pathname, protocol, search } = whatwgURL; + if (search) { + return null; + } + const scheme = protocol.slice(0, -1); + if (!Object.values(types_1.EndpointURLScheme).includes(scheme)) { + return null; + } + const isIp = (0, isIpAddress_1.isIpAddress)(hostname); + const inputContainsDefaultPort = urlString.includes(`${host}:${DEFAULT_PORTS[scheme]}`) || + (typeof value === "string" && value.includes(`${host}:${DEFAULT_PORTS[scheme]}`)); + const authority = `${host}${inputContainsDefaultPort ? `:${DEFAULT_PORTS[scheme]}` : ``}`; + return { + scheme, + authority, + path: pathname, + normalizedPath: pathname.endsWith("/") ? pathname : `${pathname}/`, + isIp, + }; +}; +exports.parseURL = parseURL; /***/ }), -/***/ 96559: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 55182: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['paymentcryptographydata'] = {}; -AWS.PaymentCryptographyData = Service.defineService('paymentcryptographydata', ['2022-02-03']); -Object.defineProperty(apiLoader.services['paymentcryptographydata'], '2022-02-03', { - get: function get() { - var model = __nccwpck_require__(68578); - model.paginators = (__nccwpck_require__(89757)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.PaymentCryptographyData; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.stringEquals = void 0; +const stringEquals = (value1, value2) => value1 === value2; +exports.stringEquals = stringEquals; /***/ }), -/***/ 33696: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 48305: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['personalize'] = {}; -AWS.Personalize = Service.defineService('personalize', ['2018-05-22']); -Object.defineProperty(apiLoader.services['personalize'], '2018-05-22', { - get: function get() { - var model = __nccwpck_require__(70169); - model.paginators = (__nccwpck_require__(64441)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Personalize; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.substring = void 0; +const substring = (input, start, stop, reverse) => { + if (start >= stop || input.length < stop) { + return null; + } + if (!reverse) { + return input.substring(start, stop); + } + return input.substring(input.length - stop, input.length - start); +}; 
+exports.substring = substring; /***/ }), -/***/ 88170: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 6535: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['personalizeevents'] = {}; -AWS.PersonalizeEvents = Service.defineService('personalizeevents', ['2018-03-22']); -Object.defineProperty(apiLoader.services['personalizeevents'], '2018-03-22', { - get: function get() { - var model = __nccwpck_require__(3606); - model.paginators = (__nccwpck_require__(94507)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.PersonalizeEvents; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.uriEncode = void 0; +const uriEncode = (value) => encodeURIComponent(value).replace(/[!*'()]/g, (c) => `%${c.charCodeAt(0).toString(16).toUpperCase()}`); +exports.uriEncode = uriEncode; /***/ }), -/***/ 66184: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 36563: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['personalizeruntime'] = {}; -AWS.PersonalizeRuntime = Service.defineService('personalizeruntime', ['2018-05-22']); -Object.defineProperty(apiLoader.services['personalizeruntime'], '2018-05-22', { - get: function get() { - var model = __nccwpck_require__(18824); - model.paginators = (__nccwpck_require__(8069)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.PersonalizeRuntime; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveEndpoint = void 0; +const debug_1 = __nccwpck_require__(27617); +const types_1 = __nccwpck_require__(57433); +const utils_1 = __nccwpck_require__(81114); +const resolveEndpoint = (ruleSetObject, options) => { + var _a, _b, _c, _d, _e, _f; + const { endpointParams, logger } = options; + const { parameters, rules } = ruleSetObject; + (_b = (_a = options.logger) === null || _a === void 0 ? void 0 : _a.debug) === null || _b === void 0 ? void 0 : _b.call(_a, `${debug_1.debugId} Initial EndpointParams: ${(0, debug_1.toDebugString)(endpointParams)}`); + const paramsWithDefault = Object.entries(parameters) + .filter(([, v]) => v.default != null) + .map(([k, v]) => [k, v.default]); + if (paramsWithDefault.length > 0) { + for (const [paramKey, paramDefaultValue] of paramsWithDefault) { + endpointParams[paramKey] = (_c = endpointParams[paramKey]) !== null && _c !== void 0 ? _c : paramDefaultValue; + } + } + const requiredParams = Object.entries(parameters) + .filter(([, v]) => v.required) + .map(([k]) => k); + for (const requiredParam of requiredParams) { + if (endpointParams[requiredParam] == null) { + throw new types_1.EndpointError(`Missing required parameter: '${requiredParam}'`); + } + } + const endpoint = (0, utils_1.evaluateRules)(rules, { endpointParams, logger, referenceRecord: {} }); + if ((_d = options.endpointParams) === null || _d === void 0 ? 
void 0 : _d.Endpoint) { + try { + const givenEndpoint = new URL(options.endpointParams.Endpoint); + const { protocol, port } = givenEndpoint; + endpoint.url.protocol = protocol; + endpoint.url.port = port; + } + catch (e) { + } + } + (_f = (_e = options.logger) === null || _e === void 0 ? void 0 : _e.debug) === null || _f === void 0 ? void 0 : _f.call(_e, `${debug_1.debugId} Resolved endpoint: ${(0, debug_1.toDebugString)(endpoint)}`); + return endpoint; +}; +exports.resolveEndpoint = resolveEndpoint; /***/ }), -/***/ 15505: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 82605: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['pi'] = {}; -AWS.PI = Service.defineService('pi', ['2018-02-27']); -Object.defineProperty(apiLoader.services['pi'], '2018-02-27', { - get: function get() { - var model = __nccwpck_require__(18761); - model.paginators = (__nccwpck_require__(84882)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.PI; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.EndpointError = void 0; +class EndpointError extends Error { + constructor(message) { + super(message); + this.name = "EndpointError"; + } +} +exports.EndpointError = EndpointError; /***/ }), -/***/ 18388: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 21261: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['pinpoint'] = {}; -AWS.Pinpoint = Service.defineService('pinpoint', ['2016-12-01']); -Object.defineProperty(apiLoader.services['pinpoint'], '2016-12-01', { - get: function get() { - var model = __nccwpck_require__(40605); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Pinpoint; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 83060: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 20312: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['pinpointemail'] = {}; -AWS.PinpointEmail = Service.defineService('pinpointemail', ['2018-07-26']); -Object.defineProperty(apiLoader.services['pinpointemail'], '2018-07-26', { - get: function get() { - var model = __nccwpck_require__(55228); - model.paginators = (__nccwpck_require__(45172)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.PinpointEmail; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 46605: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 56083: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['pinpointsmsvoice'] = {}; -AWS.PinpointSMSVoice = Service.defineService('pinpointsmsvoice', ['2018-09-05']); -Object.defineProperty(apiLoader.services['pinpointsmsvoice'], '2018-09-05', { - get: function get() { - var model = __nccwpck_require__(98689); - return model; - }, - enumerable: 
true, - configurable: true -}); +"use strict"; -module.exports = AWS.PinpointSMSVoice; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 478: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 21767: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['pinpointsmsvoicev2'] = {}; -AWS.PinpointSMSVoiceV2 = Service.defineService('pinpointsmsvoicev2', ['2022-03-31']); -Object.defineProperty(apiLoader.services['pinpointsmsvoicev2'], '2022-03-31', { - get: function get() { - var model = __nccwpck_require__(88319); - model.paginators = (__nccwpck_require__(80650)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(6663)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.PinpointSMSVoiceV2; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 14220: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 57433: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['pipes'] = {}; -AWS.Pipes = Service.defineService('pipes', ['2015-10-07']); -Object.defineProperty(apiLoader.services['pipes'], '2015-10-07', { - get: function get() { - var model = __nccwpck_require__(40616); - model.paginators = (__nccwpck_require__(17710)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Pipes; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(82605), exports); +tslib_1.__exportStar(__nccwpck_require__(21261), exports); +tslib_1.__exportStar(__nccwpck_require__(20312), exports); +tslib_1.__exportStar(__nccwpck_require__(56083), exports); +tslib_1.__exportStar(__nccwpck_require__(21767), exports); +tslib_1.__exportStar(__nccwpck_require__(41811), exports); /***/ }), -/***/ 97332: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 41811: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['polly'] = {}; -AWS.Polly = Service.defineService('polly', ['2016-06-10']); -__nccwpck_require__(53199); -Object.defineProperty(apiLoader.services['polly'], '2016-06-10', { - get: function get() { - var model = __nccwpck_require__(55078); - model.paginators = (__nccwpck_require__(77060)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Polly; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 92765: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 65075: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['pricing'] = {}; -AWS.Pricing = Service.defineService('pricing', ['2017-10-15']); -Object.defineProperty(apiLoader.services['pricing'], '2017-10-15', { - get: function get() { - var model = 
__nccwpck_require__(22484); - model.paginators = (__nccwpck_require__(60369)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Pricing; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.callFunction = void 0; +const tslib_1 = __nccwpck_require__(4351); +const lib = tslib_1.__importStar(__nccwpck_require__(83188)); +const evaluateExpression_1 = __nccwpck_require__(82980); +const callFunction = ({ fn, argv }, options) => { + const evaluatedArgs = argv.map((arg) => ["boolean", "number"].includes(typeof arg) ? arg : (0, evaluateExpression_1.evaluateExpression)(arg, "arg", options)); + return fn.split(".").reduce((acc, key) => acc[key], lib)(...evaluatedArgs); +}; +exports.callFunction = callFunction; /***/ }), -/***/ 63088: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 77851: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['privatenetworks'] = {}; -AWS.PrivateNetworks = Service.defineService('privatenetworks', ['2021-12-03']); -Object.defineProperty(apiLoader.services['privatenetworks'], '2021-12-03', { - get: function get() { - var model = __nccwpck_require__(46306); - model.paginators = (__nccwpck_require__(42771)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.PrivateNetworks; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.evaluateCondition = void 0; +const debug_1 = __nccwpck_require__(27617); +const types_1 = __nccwpck_require__(57433); +const callFunction_1 = __nccwpck_require__(65075); +const evaluateCondition = ({ assign, ...fnArgs }, options) => { + var _a, _b; + if (assign && assign in options.referenceRecord) { + throw new types_1.EndpointError(`'${assign}' is already defined in Reference Record.`); + } + const value = (0, callFunction_1.callFunction)(fnArgs, options); + (_b = (_a = options.logger) === null || _a === void 0 ? void 0 : _a.debug) === null || _b === void 0 ? void 0 : _b.call(_a, debug_1.debugId, `evaluateCondition: ${(0, debug_1.toDebugString)(fnArgs)} = ${(0, debug_1.toDebugString)(value)}`); + return { + result: value === "" ? 
true : !!value, + ...(assign != null && { toAssign: { name: assign, value } }), + }; +}; +exports.evaluateCondition = evaluateCondition; /***/ }), -/***/ 9275: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 59169: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['proton'] = {}; -AWS.Proton = Service.defineService('proton', ['2020-07-20']); -Object.defineProperty(apiLoader.services['proton'], '2020-07-20', { - get: function get() { - var model = __nccwpck_require__(78577); - model.paginators = (__nccwpck_require__(14299)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(99338)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Proton; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.evaluateConditions = void 0; +const debug_1 = __nccwpck_require__(27617); +const evaluateCondition_1 = __nccwpck_require__(77851); +const evaluateConditions = (conditions = [], options) => { + var _a, _b; + const conditionsReferenceRecord = {}; + for (const condition of conditions) { + const { result, toAssign } = (0, evaluateCondition_1.evaluateCondition)(condition, { + ...options, + referenceRecord: { + ...options.referenceRecord, + ...conditionsReferenceRecord, + }, + }); + if (!result) { + return { result }; + } + if (toAssign) { + conditionsReferenceRecord[toAssign.name] = toAssign.value; + (_b = (_a = options.logger) === null || _a === void 0 ? void 0 : _a.debug) === null || _b === void 0 ? void 0 : _b.call(_a, debug_1.debugId, `assign: ${toAssign.name} := ${(0, debug_1.toDebugString)(toAssign.value)}`); + } + } + return { result: true, referenceRecord: conditionsReferenceRecord }; +}; +exports.evaluateConditions = evaluateConditions; /***/ }), -/***/ 71266: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 35324: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['qldb'] = {}; -AWS.QLDB = Service.defineService('qldb', ['2019-01-02']); -Object.defineProperty(apiLoader.services['qldb'], '2019-01-02', { - get: function get() { - var model = __nccwpck_require__(71346); - model.paginators = (__nccwpck_require__(34265)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.QLDB; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.evaluateEndpointRule = void 0; +const debug_1 = __nccwpck_require__(27617); +const evaluateConditions_1 = __nccwpck_require__(59169); +const getEndpointHeaders_1 = __nccwpck_require__(88268); +const getEndpointProperties_1 = __nccwpck_require__(34973); +const getEndpointUrl_1 = __nccwpck_require__(23602); +const evaluateEndpointRule = (endpointRule, options) => { + var _a, _b; + const { conditions, endpoint } = endpointRule; + const { result, referenceRecord } = (0, evaluateConditions_1.evaluateConditions)(conditions, options); + if (!result) { + return; + } + const endpointRuleOptions = { + ...options, + referenceRecord: { ...options.referenceRecord, ...referenceRecord }, + }; + const { url, properties, headers } = endpoint; + (_b = (_a = options.logger) === null || _a === void 0 ? 
void 0 : _a.debug) === null || _b === void 0 ? void 0 : _b.call(_a, debug_1.debugId, `Resolving endpoint from template: ${(0, debug_1.toDebugString)(endpoint)}`); + return { + ...(headers != undefined && { + headers: (0, getEndpointHeaders_1.getEndpointHeaders)(headers, endpointRuleOptions), + }), + ...(properties != undefined && { + properties: (0, getEndpointProperties_1.getEndpointProperties)(properties, endpointRuleOptions), + }), + url: (0, getEndpointUrl_1.getEndpointUrl)(url, endpointRuleOptions), + }; +}; +exports.evaluateEndpointRule = evaluateEndpointRule; /***/ }), -/***/ 55423: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 12110: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['qldbsession'] = {}; -AWS.QLDBSession = Service.defineService('qldbsession', ['2019-07-11']); -Object.defineProperty(apiLoader.services['qldbsession'], '2019-07-11', { - get: function get() { - var model = __nccwpck_require__(60040); - model.paginators = (__nccwpck_require__(61051)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.QLDBSession; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.evaluateErrorRule = void 0; +const types_1 = __nccwpck_require__(57433); +const evaluateConditions_1 = __nccwpck_require__(59169); +const evaluateExpression_1 = __nccwpck_require__(82980); +const evaluateErrorRule = (errorRule, options) => { + const { conditions, error } = errorRule; + const { result, referenceRecord } = (0, evaluateConditions_1.evaluateConditions)(conditions, options); + if (!result) { + return; + } + throw new types_1.EndpointError((0, evaluateExpression_1.evaluateExpression)(error, "Error", { + ...options, + referenceRecord: { ...options.referenceRecord, ...referenceRecord }, + })); +}; +exports.evaluateErrorRule = evaluateErrorRule; /***/ }), -/***/ 29898: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 82980: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['quicksight'] = {}; -AWS.QuickSight = Service.defineService('quicksight', ['2018-04-01']); -Object.defineProperty(apiLoader.services['quicksight'], '2018-04-01', { - get: function get() { - var model = __nccwpck_require__(8419); - model.paginators = (__nccwpck_require__(43387)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.QuickSight; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.evaluateExpression = void 0; +const types_1 = __nccwpck_require__(57433); +const callFunction_1 = __nccwpck_require__(65075); +const evaluateTemplate_1 = __nccwpck_require__(57535); +const getReferenceValue_1 = __nccwpck_require__(68810); +const evaluateExpression = (obj, keyName, options) => { + if (typeof obj === "string") { + return (0, evaluateTemplate_1.evaluateTemplate)(obj, options); + } + else if (obj["fn"]) { + return (0, callFunction_1.callFunction)(obj, options); + } + else if (obj["ref"]) { + return (0, getReferenceValue_1.getReferenceValue)(obj, options); + } + throw new types_1.EndpointError(`'${keyName}': ${String(obj)} is not a string, function or 
reference.`); +}; +exports.evaluateExpression = evaluateExpression; /***/ }), -/***/ 94394: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 59738: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['ram'] = {}; -AWS.RAM = Service.defineService('ram', ['2018-01-04']); -Object.defineProperty(apiLoader.services['ram'], '2018-01-04', { - get: function get() { - var model = __nccwpck_require__(61375); - model.paginators = (__nccwpck_require__(85336)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.RAM; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.evaluateRules = void 0; +const types_1 = __nccwpck_require__(57433); +const evaluateEndpointRule_1 = __nccwpck_require__(35324); +const evaluateErrorRule_1 = __nccwpck_require__(12110); +const evaluateTreeRule_1 = __nccwpck_require__(26587); +const evaluateRules = (rules, options) => { + for (const rule of rules) { + if (rule.type === "endpoint") { + const endpointOrUndefined = (0, evaluateEndpointRule_1.evaluateEndpointRule)(rule, options); + if (endpointOrUndefined) { + return endpointOrUndefined; + } + } + else if (rule.type === "error") { + (0, evaluateErrorRule_1.evaluateErrorRule)(rule, options); + } + else if (rule.type === "tree") { + const endpointOrUndefined = (0, evaluateTreeRule_1.evaluateTreeRule)(rule, options); + if (endpointOrUndefined) { + return endpointOrUndefined; + } + } + else { + throw new types_1.EndpointError(`Unknown endpoint rule: ${rule}`); + } + } + throw new types_1.EndpointError(`Rules evaluation failed`); +}; +exports.evaluateRules = evaluateRules; /***/ }), -/***/ 70145: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 57535: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['rbin'] = {}; -AWS.Rbin = Service.defineService('rbin', ['2021-06-15']); -Object.defineProperty(apiLoader.services['rbin'], '2021-06-15', { - get: function get() { - var model = __nccwpck_require__(18897); - model.paginators = (__nccwpck_require__(57601)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Rbin; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.evaluateTemplate = void 0; +const lib_1 = __nccwpck_require__(83188); +const evaluateTemplate = (template, options) => { + const evaluatedTemplateArr = []; + const templateContext = { + ...options.endpointParams, + ...options.referenceRecord, + }; + let currentIndex = 0; + while (currentIndex < template.length) { + const openingBraceIndex = template.indexOf("{", currentIndex); + if (openingBraceIndex === -1) { + evaluatedTemplateArr.push(template.slice(currentIndex)); + break; + } + evaluatedTemplateArr.push(template.slice(currentIndex, openingBraceIndex)); + const closingBraceIndex = template.indexOf("}", openingBraceIndex); + if (closingBraceIndex === -1) { + evaluatedTemplateArr.push(template.slice(openingBraceIndex)); + break; + } + if (template[openingBraceIndex + 1] === "{" && template[closingBraceIndex + 1] === "}") { + evaluatedTemplateArr.push(template.slice(openingBraceIndex + 1, 
closingBraceIndex)); + currentIndex = closingBraceIndex + 2; + } + const parameterName = template.substring(openingBraceIndex + 1, closingBraceIndex); + if (parameterName.includes("#")) { + const [refName, attrName] = parameterName.split("#"); + evaluatedTemplateArr.push((0, lib_1.getAttr)(templateContext[refName], attrName)); + } + else { + evaluatedTemplateArr.push(templateContext[parameterName]); + } + currentIndex = closingBraceIndex + 1; + } + return evaluatedTemplateArr.join(""); +}; +exports.evaluateTemplate = evaluateTemplate; /***/ }), -/***/ 71578: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 26587: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['rds'] = {}; -AWS.RDS = Service.defineService('rds', ['2013-01-10', '2013-02-12', '2013-09-09', '2014-09-01', '2014-09-01*', '2014-10-31']); -__nccwpck_require__(71928); -Object.defineProperty(apiLoader.services['rds'], '2013-01-10', { - get: function get() { - var model = __nccwpck_require__(59989); - model.paginators = (__nccwpck_require__(978)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); -Object.defineProperty(apiLoader.services['rds'], '2013-02-12', { - get: function get() { - var model = __nccwpck_require__(55061); - model.paginators = (__nccwpck_require__(39581)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); -Object.defineProperty(apiLoader.services['rds'], '2013-09-09', { - get: function get() { - var model = __nccwpck_require__(36331); - model.paginators = (__nccwpck_require__(14485)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(36851)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); -Object.defineProperty(apiLoader.services['rds'], '2014-09-01', { - get: function get() { - var model = __nccwpck_require__(19226); - model.paginators = (__nccwpck_require__(49863)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); -Object.defineProperty(apiLoader.services['rds'], '2014-10-31', { - get: function get() { - var model = __nccwpck_require__(91916); - model.paginators = (__nccwpck_require__(85082)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(20371)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.RDS; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.evaluateTreeRule = void 0; +const evaluateConditions_1 = __nccwpck_require__(59169); +const evaluateRules_1 = __nccwpck_require__(59738); +const evaluateTreeRule = (treeRule, options) => { + const { conditions, rules } = treeRule; + const { result, referenceRecord } = (0, evaluateConditions_1.evaluateConditions)(conditions, options); + if (!result) { + return; + } + return (0, evaluateRules_1.evaluateRules)(rules, { + ...options, + referenceRecord: { ...options.referenceRecord, ...referenceRecord }, + }); +}; +exports.evaluateTreeRule = evaluateTreeRule; /***/ }), -/***/ 30147: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 88268: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['rdsdataservice'] = {}; 
-AWS.RDSDataService = Service.defineService('rdsdataservice', ['2018-08-01']); -__nccwpck_require__(64070); -Object.defineProperty(apiLoader.services['rdsdataservice'], '2018-08-01', { - get: function get() { - var model = __nccwpck_require__(13559); - model.paginators = (__nccwpck_require__(41160)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.RDSDataService; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getEndpointHeaders = void 0; +const types_1 = __nccwpck_require__(57433); +const evaluateExpression_1 = __nccwpck_require__(82980); +const getEndpointHeaders = (headers, options) => Object.entries(headers).reduce((acc, [headerKey, headerVal]) => ({ + ...acc, + [headerKey]: headerVal.map((headerValEntry) => { + const processedExpr = (0, evaluateExpression_1.evaluateExpression)(headerValEntry, "Header value entry", options); + if (typeof processedExpr !== "string") { + throw new types_1.EndpointError(`Header '${headerKey}' value '${processedExpr}' is not a string`); + } + return processedExpr; + }), +}), {}); +exports.getEndpointHeaders = getEndpointHeaders; /***/ }), -/***/ 84853: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 34973: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['redshift'] = {}; -AWS.Redshift = Service.defineService('redshift', ['2012-12-01']); -Object.defineProperty(apiLoader.services['redshift'], '2012-12-01', { - get: function get() { - var model = __nccwpck_require__(24827); - model.paginators = (__nccwpck_require__(88012)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(79011)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Redshift; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getEndpointProperties = void 0; +const getEndpointProperty_1 = __nccwpck_require__(42978); +const getEndpointProperties = (properties, options) => Object.entries(properties).reduce((acc, [propertyKey, propertyVal]) => ({ + ...acc, + [propertyKey]: (0, getEndpointProperty_1.getEndpointProperty)(propertyVal, options), +}), {}); +exports.getEndpointProperties = getEndpointProperties; /***/ }), -/***/ 203: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 42978: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['redshiftdata'] = {}; -AWS.RedshiftData = Service.defineService('redshiftdata', ['2019-12-20']); -Object.defineProperty(apiLoader.services['redshiftdata'], '2019-12-20', { - get: function get() { - var model = __nccwpck_require__(85203); - model.paginators = (__nccwpck_require__(27797)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.RedshiftData; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getEndpointProperty = void 0; +const types_1 = __nccwpck_require__(57433); +const evaluateTemplate_1 = __nccwpck_require__(57535); +const getEndpointProperties_1 = __nccwpck_require__(34973); +const getEndpointProperty = (property, options) => { + if (Array.isArray(property)) { + return 
property.map((propertyEntry) => (0, exports.getEndpointProperty)(propertyEntry, options)); + } + switch (typeof property) { + case "string": + return (0, evaluateTemplate_1.evaluateTemplate)(property, options); + case "object": + if (property === null) { + throw new types_1.EndpointError(`Unexpected endpoint property: ${property}`); + } + return (0, getEndpointProperties_1.getEndpointProperties)(property, options); + case "boolean": + return property; + default: + throw new types_1.EndpointError(`Unexpected endpoint property type: ${typeof property}`); + } +}; +exports.getEndpointProperty = getEndpointProperty; /***/ }), -/***/ 29987: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 23602: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['redshiftserverless'] = {}; -AWS.RedshiftServerless = Service.defineService('redshiftserverless', ['2021-04-21']); -Object.defineProperty(apiLoader.services['redshiftserverless'], '2021-04-21', { - get: function get() { - var model = __nccwpck_require__(95705); - model.paginators = (__nccwpck_require__(892)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.RedshiftServerless; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getEndpointUrl = void 0; +const types_1 = __nccwpck_require__(57433); +const evaluateExpression_1 = __nccwpck_require__(82980); +const getEndpointUrl = (endpointUrl, options) => { + const expression = (0, evaluateExpression_1.evaluateExpression)(endpointUrl, "Endpoint URL", options); + if (typeof expression === "string") { + try { + return new URL(expression); + } + catch (error) { + console.error(`Failed to construct URL with ${expression}`, error); + throw error; + } + } + throw new types_1.EndpointError(`Endpoint URL must be a string, got ${typeof expression}`); +}; +exports.getEndpointUrl = getEndpointUrl; /***/ }), -/***/ 65470: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 68810: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['rekognition'] = {}; -AWS.Rekognition = Service.defineService('rekognition', ['2016-06-27']); -Object.defineProperty(apiLoader.services['rekognition'], '2016-06-27', { - get: function get() { - var model = __nccwpck_require__(66442); - model.paginators = (__nccwpck_require__(37753)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(78910)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Rekognition; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getReferenceValue = void 0; +const getReferenceValue = ({ ref }, options) => { + const referenceRecord = { + ...options.endpointParams, + ...options.referenceRecord, + }; + return referenceRecord[ref]; +}; +exports.getReferenceValue = getReferenceValue; /***/ }), -/***/ 21173: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 81114: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - 
-apiLoader.services['resiliencehub'] = {}; -AWS.Resiliencehub = Service.defineService('resiliencehub', ['2020-04-30']); -Object.defineProperty(apiLoader.services['resiliencehub'], '2020-04-30', { - get: function get() { - var model = __nccwpck_require__(3885); - model.paginators = (__nccwpck_require__(38750)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Resiliencehub; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(59738), exports); /***/ }), -/***/ 74071: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 98095: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['resourceexplorer2'] = {}; -AWS.ResourceExplorer2 = Service.defineService('resourceexplorer2', ['2022-07-28']); -Object.defineProperty(apiLoader.services['resourceexplorer2'], '2022-07-28', { - get: function get() { - var model = __nccwpck_require__(26515); - model.paginators = (__nccwpck_require__(8580)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.ResourceExplorer2; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.defaultUserAgent = exports.UA_APP_ID_INI_NAME = exports.UA_APP_ID_ENV_NAME = void 0; +const node_config_provider_1 = __nccwpck_require__(33461); +const os_1 = __nccwpck_require__(22037); +const process_1 = __nccwpck_require__(77282); +const is_crt_available_1 = __nccwpck_require__(68390); +exports.UA_APP_ID_ENV_NAME = "AWS_SDK_UA_APP_ID"; +exports.UA_APP_ID_INI_NAME = "sdk-ua-app-id"; +const defaultUserAgent = ({ serviceId, clientVersion }) => { + const sections = [ + ["aws-sdk-js", clientVersion], + ["ua", "2.0"], + [`os/${(0, os_1.platform)()}`, (0, os_1.release)()], + ["lang/js"], + ["md/nodejs", `${process_1.versions.node}`], + ]; + const crtAvailable = (0, is_crt_available_1.isCrtAvailable)(); + if (crtAvailable) { + sections.push(crtAvailable); + } + if (serviceId) { + sections.push([`api/${serviceId}`, clientVersion]); + } + if (process_1.env.AWS_EXECUTION_ENV) { + sections.push([`exec-env/${process_1.env.AWS_EXECUTION_ENV}`]); + } + const appIdPromise = (0, node_config_provider_1.loadConfig)({ + environmentVariableSelector: (env) => env[exports.UA_APP_ID_ENV_NAME], + configFileSelector: (profile) => profile[exports.UA_APP_ID_INI_NAME], + default: undefined, + })(); + let resolvedUserAgent = undefined; + return async () => { + if (!resolvedUserAgent) { + const appId = await appIdPromise; + resolvedUserAgent = appId ? 
[...sections, [`app/${appId}`]] : [...sections]; + } + return resolvedUserAgent; + }; +}; +exports.defaultUserAgent = defaultUserAgent; /***/ }), -/***/ 58756: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 68390: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['resourcegroups'] = {}; -AWS.ResourceGroups = Service.defineService('resourcegroups', ['2017-11-27']); -Object.defineProperty(apiLoader.services['resourcegroups'], '2017-11-27', { - get: function get() { - var model = __nccwpck_require__(73621); - model.paginators = (__nccwpck_require__(24085)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.ResourceGroups; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isCrtAvailable = void 0; +const isCrtAvailable = () => { + try { + if ( true && __nccwpck_require__(87578)) { + return ["md/crt-avail"]; + } + return null; + } + catch (e) { + return null; + } +}; +exports.isCrtAvailable = isCrtAvailable; /***/ }), -/***/ 7385: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 28172: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['resourcegroupstaggingapi'] = {}; -AWS.ResourceGroupsTaggingAPI = Service.defineService('resourcegroupstaggingapi', ['2017-01-26']); -Object.defineProperty(apiLoader.services['resourcegroupstaggingapi'], '2017-01-26', { - get: function get() { - var model = __nccwpck_require__(71720); - model.paginators = (__nccwpck_require__(36635)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.ResourceGroupsTaggingAPI; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.toUtf8 = exports.fromUtf8 = void 0; +const pureJs_1 = __nccwpck_require__(21590); +const whatwgEncodingApi_1 = __nccwpck_require__(89215); +const fromUtf8 = (input) => typeof TextEncoder === "function" ? (0, whatwgEncodingApi_1.fromUtf8)(input) : (0, pureJs_1.fromUtf8)(input); +exports.fromUtf8 = fromUtf8; +const toUtf8 = (input) => typeof TextDecoder === "function" ? 
(0, whatwgEncodingApi_1.toUtf8)(input) : (0, pureJs_1.toUtf8)(input); +exports.toUtf8 = toUtf8; /***/ }), -/***/ 18068: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 21590: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['robomaker'] = {}; -AWS.RoboMaker = Service.defineService('robomaker', ['2018-06-29']); -Object.defineProperty(apiLoader.services['robomaker'], '2018-06-29', { - get: function get() { - var model = __nccwpck_require__(6904); - model.paginators = (__nccwpck_require__(43495)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.RoboMaker; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.toUtf8 = exports.fromUtf8 = void 0; +const fromUtf8 = (input) => { + const bytes = []; + for (let i = 0, len = input.length; i < len; i++) { + const value = input.charCodeAt(i); + if (value < 0x80) { + bytes.push(value); + } + else if (value < 0x800) { + bytes.push((value >> 6) | 0b11000000, (value & 0b111111) | 0b10000000); + } + else if (i + 1 < input.length && (value & 0xfc00) === 0xd800 && (input.charCodeAt(i + 1) & 0xfc00) === 0xdc00) { + const surrogatePair = 0x10000 + ((value & 0b1111111111) << 10) + (input.charCodeAt(++i) & 0b1111111111); + bytes.push((surrogatePair >> 18) | 0b11110000, ((surrogatePair >> 12) & 0b111111) | 0b10000000, ((surrogatePair >> 6) & 0b111111) | 0b10000000, (surrogatePair & 0b111111) | 0b10000000); + } + else { + bytes.push((value >> 12) | 0b11100000, ((value >> 6) & 0b111111) | 0b10000000, (value & 0b111111) | 0b10000000); + } + } + return Uint8Array.from(bytes); +}; +exports.fromUtf8 = fromUtf8; +const toUtf8 = (input) => { + let decoded = ""; + for (let i = 0, len = input.length; i < len; i++) { + const byte = input[i]; + if (byte < 0x80) { + decoded += String.fromCharCode(byte); + } + else if (0b11000000 <= byte && byte < 0b11100000) { + const nextByte = input[++i]; + decoded += String.fromCharCode(((byte & 0b11111) << 6) | (nextByte & 0b111111)); + } + else if (0b11110000 <= byte && byte < 0b101101101) { + const surrogatePair = [byte, input[++i], input[++i], input[++i]]; + const encoded = "%" + surrogatePair.map((byteValue) => byteValue.toString(16)).join("%"); + decoded += decodeURIComponent(encoded); + } + else { + decoded += String.fromCharCode(((byte & 0b1111) << 12) | ((input[++i] & 0b111111) << 6) | (input[++i] & 0b111111)); + } + } + return decoded; +}; +exports.toUtf8 = toUtf8; /***/ }), -/***/ 83604: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 89215: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['rolesanywhere'] = {}; -AWS.RolesAnywhere = Service.defineService('rolesanywhere', ['2018-05-10']); -Object.defineProperty(apiLoader.services['rolesanywhere'], '2018-05-10', { - get: function get() { - var model = __nccwpck_require__(80801); - model.paginators = (__nccwpck_require__(65955)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.RolesAnywhere; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.toUtf8 = exports.fromUtf8 = void 0; +function fromUtf8(input) { + return new 
TextEncoder().encode(input); +} +exports.fromUtf8 = fromUtf8; +function toUtf8(input) { + return new TextDecoder("utf-8").decode(input); +} +exports.toUtf8 = toUtf8; /***/ }), -/***/ 44968: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 43779: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['route53'] = {}; -AWS.Route53 = Service.defineService('route53', ['2013-04-01']); -__nccwpck_require__(69627); -Object.defineProperty(apiLoader.services['route53'], '2013-04-01', { - get: function get() { - var model = __nccwpck_require__(20959); - model.paginators = (__nccwpck_require__(46456)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(28347)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Route53; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS = exports.DEFAULT_USE_DUALSTACK_ENDPOINT = exports.CONFIG_USE_DUALSTACK_ENDPOINT = exports.ENV_USE_DUALSTACK_ENDPOINT = void 0; +const util_config_provider_1 = __nccwpck_require__(83375); +exports.ENV_USE_DUALSTACK_ENDPOINT = "AWS_USE_DUALSTACK_ENDPOINT"; +exports.CONFIG_USE_DUALSTACK_ENDPOINT = "use_dualstack_endpoint"; +exports.DEFAULT_USE_DUALSTACK_ENDPOINT = false; +exports.NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => (0, util_config_provider_1.booleanSelector)(env, exports.ENV_USE_DUALSTACK_ENDPOINT, util_config_provider_1.SelectorType.ENV), + configFileSelector: (profile) => (0, util_config_provider_1.booleanSelector)(profile, exports.CONFIG_USE_DUALSTACK_ENDPOINT, util_config_provider_1.SelectorType.CONFIG), + default: false, +}; /***/ }), -/***/ 51994: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 17994: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['route53domains'] = {}; -AWS.Route53Domains = Service.defineService('route53domains', ['2014-05-15']); -Object.defineProperty(apiLoader.services['route53domains'], '2014-05-15', { - get: function get() { - var model = __nccwpck_require__(57598); - model.paginators = (__nccwpck_require__(52189)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Route53Domains; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS = exports.DEFAULT_USE_FIPS_ENDPOINT = exports.CONFIG_USE_FIPS_ENDPOINT = exports.ENV_USE_FIPS_ENDPOINT = void 0; +const util_config_provider_1 = __nccwpck_require__(83375); +exports.ENV_USE_FIPS_ENDPOINT = "AWS_USE_FIPS_ENDPOINT"; +exports.CONFIG_USE_FIPS_ENDPOINT = "use_fips_endpoint"; +exports.DEFAULT_USE_FIPS_ENDPOINT = false; +exports.NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => (0, util_config_provider_1.booleanSelector)(env, exports.ENV_USE_FIPS_ENDPOINT, util_config_provider_1.SelectorType.ENV), + configFileSelector: (profile) => (0, util_config_provider_1.booleanSelector)(profile, exports.CONFIG_USE_FIPS_ENDPOINT, util_config_provider_1.SelectorType.CONFIG), + default: false, +}; /***/ }), -/***/ 35738: -/***/ ((module, 
__unused_webpack_exports, __nccwpck_require__) => { +/***/ 18421: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['route53recoverycluster'] = {}; -AWS.Route53RecoveryCluster = Service.defineService('route53recoverycluster', ['2019-12-02']); -Object.defineProperty(apiLoader.services['route53recoverycluster'], '2019-12-02', { - get: function get() { - var model = __nccwpck_require__(73989); - model.paginators = (__nccwpck_require__(69118)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Route53RecoveryCluster; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(43779), exports); +tslib_1.__exportStar(__nccwpck_require__(17994), exports); +tslib_1.__exportStar(__nccwpck_require__(37432), exports); +tslib_1.__exportStar(__nccwpck_require__(61892), exports); /***/ }), -/***/ 16063: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 37432: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['route53recoverycontrolconfig'] = {}; -AWS.Route53RecoveryControlConfig = Service.defineService('route53recoverycontrolconfig', ['2020-11-02']); -Object.defineProperty(apiLoader.services['route53recoverycontrolconfig'], '2020-11-02', { - get: function get() { - var model = __nccwpck_require__(38334); - model.paginators = (__nccwpck_require__(19728)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(57184)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Route53RecoveryControlConfig; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveCustomEndpointsConfig = void 0; +const util_middleware_1 = __nccwpck_require__(2390); +const resolveCustomEndpointsConfig = (input) => { + var _a, _b; + const { endpoint, urlParser } = input; + return { + ...input, + tls: (_a = input.tls) !== null && _a !== void 0 ? _a : true, + endpoint: (0, util_middleware_1.normalizeProvider)(typeof endpoint === "string" ? urlParser(endpoint) : endpoint), + isCustomEndpoint: true, + useDualstackEndpoint: (0, util_middleware_1.normalizeProvider)((_b = input.useDualstackEndpoint) !== null && _b !== void 0 ? 
_b : false), + }; +}; +exports.resolveCustomEndpointsConfig = resolveCustomEndpointsConfig; /***/ }), -/***/ 79106: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 61892: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['route53recoveryreadiness'] = {}; -AWS.Route53RecoveryReadiness = Service.defineService('route53recoveryreadiness', ['2019-12-02']); -Object.defineProperty(apiLoader.services['route53recoveryreadiness'], '2019-12-02', { - get: function get() { - var model = __nccwpck_require__(40156); - model.paginators = (__nccwpck_require__(74839)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Route53RecoveryReadiness; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveEndpointsConfig = void 0; +const util_middleware_1 = __nccwpck_require__(2390); +const getEndpointFromRegion_1 = __nccwpck_require__(48570); +const resolveEndpointsConfig = (input) => { + var _a, _b; + const useDualstackEndpoint = (0, util_middleware_1.normalizeProvider)((_a = input.useDualstackEndpoint) !== null && _a !== void 0 ? _a : false); + const { endpoint, useFipsEndpoint, urlParser } = input; + return { + ...input, + tls: (_b = input.tls) !== null && _b !== void 0 ? _b : true, + endpoint: endpoint + ? (0, util_middleware_1.normalizeProvider)(typeof endpoint === "string" ? urlParser(endpoint) : endpoint) + : () => (0, getEndpointFromRegion_1.getEndpointFromRegion)({ ...input, useDualstackEndpoint, useFipsEndpoint }), + isCustomEndpoint: !!endpoint, + useDualstackEndpoint, + }; +}; +exports.resolveEndpointsConfig = resolveEndpointsConfig; /***/ }), -/***/ 25894: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 48570: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['route53resolver'] = {}; -AWS.Route53Resolver = Service.defineService('route53resolver', ['2018-04-01']); -Object.defineProperty(apiLoader.services['route53resolver'], '2018-04-01', { - get: function get() { - var model = __nccwpck_require__(89229); - model.paginators = (__nccwpck_require__(95050)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Route53Resolver; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getEndpointFromRegion = void 0; +const getEndpointFromRegion = async (input) => { + var _a; + const { tls = true } = input; + const region = await input.region(); + const dnsHostRegex = new RegExp(/^([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9-]{0,61}[a-zA-Z0-9])$/); + if (!dnsHostRegex.test(region)) { + throw new Error("Invalid region in client config"); + } + const useDualstackEndpoint = await input.useDualstackEndpoint(); + const useFipsEndpoint = await input.useFipsEndpoint(); + const { hostname } = (_a = (await input.regionInfoProvider(region, { useDualstackEndpoint, useFipsEndpoint }))) !== null && _a !== void 0 ? _a : {}; + if (!hostname) { + throw new Error("Cannot resolve hostname from client config"); + } + return input.urlParser(`${tls ? 
"https:" : "http:"}//${hostname}`); +}; +exports.getEndpointFromRegion = getEndpointFromRegion; /***/ }), -/***/ 53237: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 53098: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['rum'] = {}; -AWS.RUM = Service.defineService('rum', ['2018-05-10']); -Object.defineProperty(apiLoader.services['rum'], '2018-05-10', { - get: function get() { - var model = __nccwpck_require__(84126); - model.paginators = (__nccwpck_require__(79432)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.RUM; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(18421), exports); +tslib_1.__exportStar(__nccwpck_require__(221), exports); +tslib_1.__exportStar(__nccwpck_require__(86985), exports); /***/ }), -/***/ 83256: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 33898: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['s3'] = {}; -AWS.S3 = Service.defineService('s3', ['2006-03-01']); -__nccwpck_require__(26543); -Object.defineProperty(apiLoader.services['s3'], '2006-03-01', { - get: function get() { - var model = __nccwpck_require__(1129); - model.paginators = (__nccwpck_require__(7265)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(74048)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.S3; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.NODE_REGION_CONFIG_FILE_OPTIONS = exports.NODE_REGION_CONFIG_OPTIONS = exports.REGION_INI_NAME = exports.REGION_ENV_NAME = void 0; +exports.REGION_ENV_NAME = "AWS_REGION"; +exports.REGION_INI_NAME = "region"; +exports.NODE_REGION_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => env[exports.REGION_ENV_NAME], + configFileSelector: (profile) => profile[exports.REGION_INI_NAME], + default: () => { + throw new Error("Region is missing"); + }, +}; +exports.NODE_REGION_CONFIG_FILE_OPTIONS = { + preferredFile: "credentials", +}; /***/ }), -/***/ 99817: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 49506: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['s3control'] = {}; -AWS.S3Control = Service.defineService('s3control', ['2018-08-20']); -__nccwpck_require__(71207); -Object.defineProperty(apiLoader.services['s3control'], '2018-08-20', { - get: function get() { - var model = __nccwpck_require__(1201); - model.paginators = (__nccwpck_require__(55527)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.S3Control; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getRealRegion = void 0; +const isFipsRegion_1 = __nccwpck_require__(43870); +const getRealRegion = (region) => (0, isFipsRegion_1.isFipsRegion)(region) + ? ["fips-aws-global", "aws-fips"].includes(region) + ? 
"us-east-1" + : region.replace(/fips-(dkr-|prod-)?|-fips/, "") + : region; +exports.getRealRegion = getRealRegion; /***/ }), -/***/ 90493: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 221: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['s3outposts'] = {}; -AWS.S3Outposts = Service.defineService('s3outposts', ['2017-07-25']); -Object.defineProperty(apiLoader.services['s3outposts'], '2017-07-25', { - get: function get() { - var model = __nccwpck_require__(79971); - model.paginators = (__nccwpck_require__(32505)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.S3Outposts; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(33898), exports); +tslib_1.__exportStar(__nccwpck_require__(87065), exports); /***/ }), -/***/ 77657: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 43870: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['sagemaker'] = {}; -AWS.SageMaker = Service.defineService('sagemaker', ['2017-07-24']); -Object.defineProperty(apiLoader.services['sagemaker'], '2017-07-24', { - get: function get() { - var model = __nccwpck_require__(71132); - model.paginators = (__nccwpck_require__(69254)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(80824)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.SageMaker; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isFipsRegion = void 0; +const isFipsRegion = (region) => typeof region === "string" && (region.startsWith("fips-") || region.endsWith("-fips")); +exports.isFipsRegion = isFipsRegion; /***/ }), -/***/ 38966: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 87065: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['sagemakeredge'] = {}; -AWS.SagemakerEdge = Service.defineService('sagemakeredge', ['2020-09-23']); -Object.defineProperty(apiLoader.services['sagemakeredge'], '2020-09-23', { - get: function get() { - var model = __nccwpck_require__(97093); - model.paginators = (__nccwpck_require__(71636)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.SagemakerEdge; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveRegionConfig = void 0; +const getRealRegion_1 = __nccwpck_require__(49506); +const isFipsRegion_1 = __nccwpck_require__(43870); +const resolveRegionConfig = (input) => { + const { region, useFipsEndpoint } = input; + if (!region) { + throw new Error("Region is missing"); + } + return { + ...input, + region: async () => { + if (typeof region === "string") { + return (0, getRealRegion_1.getRealRegion)(region); + } + const providedRegion = await region(); + return (0, getRealRegion_1.getRealRegion)(providedRegion); + }, + useFipsEndpoint: async () => { + const providedRegion = typeof 
region === "string" ? region : await region(); + if ((0, isFipsRegion_1.isFipsRegion)(providedRegion)) { + return true; + } + return typeof useFipsEndpoint !== "function" ? Promise.resolve(!!useFipsEndpoint) : useFipsEndpoint(); + }, + }; +}; +exports.resolveRegionConfig = resolveRegionConfig; /***/ }), -/***/ 67644: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 19814: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['sagemakerfeaturestoreruntime'] = {}; -AWS.SageMakerFeatureStoreRuntime = Service.defineService('sagemakerfeaturestoreruntime', ['2020-07-01']); -Object.defineProperty(apiLoader.services['sagemakerfeaturestoreruntime'], '2020-07-01', { - get: function get() { - var model = __nccwpck_require__(75546); - model.paginators = (__nccwpck_require__(12151)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.SageMakerFeatureStoreRuntime; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 4707: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 14832: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['sagemakergeospatial'] = {}; -AWS.SageMakerGeospatial = Service.defineService('sagemakergeospatial', ['2020-05-27']); -Object.defineProperty(apiLoader.services['sagemakergeospatial'], '2020-05-27', { - get: function get() { - var model = __nccwpck_require__(26059); - model.paginators = (__nccwpck_require__(99606)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.SageMakerGeospatial; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 28199: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 99760: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['sagemakermetrics'] = {}; -AWS.SageMakerMetrics = Service.defineService('sagemakermetrics', ['2022-09-30']); -Object.defineProperty(apiLoader.services['sagemakermetrics'], '2022-09-30', { - get: function get() { - var model = __nccwpck_require__(89834); - model.paginators = (__nccwpck_require__(80107)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.SageMakerMetrics; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getHostnameFromVariants = void 0; +const getHostnameFromVariants = (variants = [], { useFipsEndpoint, useDualstackEndpoint }) => { + var _a; + return (_a = variants.find(({ tags }) => useFipsEndpoint === tags.includes("fips") && useDualstackEndpoint === tags.includes("dualstack"))) === null || _a === void 0 ? 
void 0 : _a.hostname; +}; +exports.getHostnameFromVariants = getHostnameFromVariants; /***/ }), -/***/ 85044: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 77792: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['sagemakerruntime'] = {}; -AWS.SageMakerRuntime = Service.defineService('sagemakerruntime', ['2017-05-13']); -Object.defineProperty(apiLoader.services['sagemakerruntime'], '2017-05-13', { - get: function get() { - var model = __nccwpck_require__(27032); - model.paginators = (__nccwpck_require__(7570)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.SageMakerRuntime; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getRegionInfo = void 0; +const getHostnameFromVariants_1 = __nccwpck_require__(99760); +const getResolvedHostname_1 = __nccwpck_require__(1487); +const getResolvedPartition_1 = __nccwpck_require__(44441); +const getResolvedSigningRegion_1 = __nccwpck_require__(92281); +const getRegionInfo = (region, { useFipsEndpoint = false, useDualstackEndpoint = false, signingService, regionHash, partitionHash, }) => { + var _a, _b, _c, _d, _e, _f; + const partition = (0, getResolvedPartition_1.getResolvedPartition)(region, { partitionHash }); + const resolvedRegion = region in regionHash ? region : (_b = (_a = partitionHash[partition]) === null || _a === void 0 ? void 0 : _a.endpoint) !== null && _b !== void 0 ? _b : region; + const hostnameOptions = { useFipsEndpoint, useDualstackEndpoint }; + const regionHostname = (0, getHostnameFromVariants_1.getHostnameFromVariants)((_c = regionHash[resolvedRegion]) === null || _c === void 0 ? void 0 : _c.variants, hostnameOptions); + const partitionHostname = (0, getHostnameFromVariants_1.getHostnameFromVariants)((_d = partitionHash[partition]) === null || _d === void 0 ? void 0 : _d.variants, hostnameOptions); + const hostname = (0, getResolvedHostname_1.getResolvedHostname)(resolvedRegion, { regionHostname, partitionHostname }); + if (hostname === undefined) { + throw new Error(`Endpoint resolution failed for: ${{ resolvedRegion, useFipsEndpoint, useDualstackEndpoint }}`); + } + const signingRegion = (0, getResolvedSigningRegion_1.getResolvedSigningRegion)(hostname, { + signingRegion: (_e = regionHash[resolvedRegion]) === null || _e === void 0 ? void 0 : _e.signingRegion, + regionRegex: partitionHash[partition].regionRegex, + useFipsEndpoint, + }); + return { + partition, + signingService, + hostname, + ...(signingRegion && { signingRegion }), + ...(((_f = regionHash[resolvedRegion]) === null || _f === void 0 ? 
void 0 : _f.signingService) && { + signingService: regionHash[resolvedRegion].signingService, + }), + }; +}; +exports.getRegionInfo = getRegionInfo; /***/ }), -/***/ 62825: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 1487: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['savingsplans'] = {}; -AWS.SavingsPlans = Service.defineService('savingsplans', ['2019-06-28']); -Object.defineProperty(apiLoader.services['savingsplans'], '2019-06-28', { - get: function get() { - var model = __nccwpck_require__(46879); - model.paginators = (__nccwpck_require__(78998)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.SavingsPlans; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getResolvedHostname = void 0; +const getResolvedHostname = (resolvedRegion, { regionHostname, partitionHostname }) => regionHostname + ? regionHostname + : partitionHostname + ? partitionHostname.replace("{region}", resolvedRegion) + : undefined; +exports.getResolvedHostname = getResolvedHostname; /***/ }), -/***/ 94840: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 44441: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['scheduler'] = {}; -AWS.Scheduler = Service.defineService('scheduler', ['2021-06-30']); -Object.defineProperty(apiLoader.services['scheduler'], '2021-06-30', { - get: function get() { - var model = __nccwpck_require__(36876); - model.paginators = (__nccwpck_require__(54594)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Scheduler; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getResolvedPartition = void 0; +const getResolvedPartition = (region, { partitionHash }) => { var _a; return (_a = Object.keys(partitionHash || {}).find((key) => partitionHash[key].regions.includes(region))) !== null && _a !== void 0 ? 
_a : "aws"; }; +exports.getResolvedPartition = getResolvedPartition; /***/ }), -/***/ 55713: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 92281: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['schemas'] = {}; -AWS.Schemas = Service.defineService('schemas', ['2019-12-02']); -Object.defineProperty(apiLoader.services['schemas'], '2019-12-02', { - get: function get() { - var model = __nccwpck_require__(76626); - model.paginators = (__nccwpck_require__(34227)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(62213)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Schemas; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getResolvedSigningRegion = void 0; +const getResolvedSigningRegion = (hostname, { signingRegion, regionRegex, useFipsEndpoint }) => { + if (signingRegion) { + return signingRegion; + } + else if (useFipsEndpoint) { + const regionRegexJs = regionRegex.replace("\\\\", "\\").replace(/^\^/g, "\\.").replace(/\$$/g, "\\."); + const regionRegexmatchArray = hostname.match(regionRegexJs); + if (regionRegexmatchArray) { + return regionRegexmatchArray[0].slice(1, -1); + } + } +}; +exports.getResolvedSigningRegion = getResolvedSigningRegion; /***/ }), -/***/ 85131: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 86985: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['secretsmanager'] = {}; -AWS.SecretsManager = Service.defineService('secretsmanager', ['2017-10-17']); -Object.defineProperty(apiLoader.services['secretsmanager'], '2017-10-17', { - get: function get() { - var model = __nccwpck_require__(89470); - model.paginators = (__nccwpck_require__(25613)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.SecretsManager; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(19814), exports); +tslib_1.__exportStar(__nccwpck_require__(14832), exports); +tslib_1.__exportStar(__nccwpck_require__(77792), exports); /***/ }), -/***/ 21550: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 18044: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['securityhub'] = {}; -AWS.SecurityHub = Service.defineService('securityhub', ['2018-10-26']); -Object.defineProperty(apiLoader.services['securityhub'], '2018-10-26', { - get: function get() { - var model = __nccwpck_require__(29208); - model.paginators = (__nccwpck_require__(85595)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.SecurityHub; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Endpoint = void 0; +var Endpoint; +(function (Endpoint) { + Endpoint["IPv4"] = "http://169.254.169.254"; + Endpoint["IPv6"] = "http://[fd00:ec2::254]"; +})(Endpoint = exports.Endpoint || (exports.Endpoint = {})); /***/ }), -/***/ 84296: -/***/ 
((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 57342: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['securitylake'] = {}; -AWS.SecurityLake = Service.defineService('securitylake', ['2018-05-10']); -Object.defineProperty(apiLoader.services['securitylake'], '2018-05-10', { - get: function get() { - var model = __nccwpck_require__(26935); - model.paginators = (__nccwpck_require__(42170)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.SecurityLake; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ENDPOINT_CONFIG_OPTIONS = exports.CONFIG_ENDPOINT_NAME = exports.ENV_ENDPOINT_NAME = void 0; +exports.ENV_ENDPOINT_NAME = "AWS_EC2_METADATA_SERVICE_ENDPOINT"; +exports.CONFIG_ENDPOINT_NAME = "ec2_metadata_service_endpoint"; +exports.ENDPOINT_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => env[exports.ENV_ENDPOINT_NAME], + configFileSelector: (profile) => profile[exports.CONFIG_ENDPOINT_NAME], + default: undefined, +}; /***/ }), -/***/ 62402: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 80991: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['serverlessapplicationrepository'] = {}; -AWS.ServerlessApplicationRepository = Service.defineService('serverlessapplicationrepository', ['2017-09-08']); -Object.defineProperty(apiLoader.services['serverlessapplicationrepository'], '2017-09-08', { - get: function get() { - var model = __nccwpck_require__(68422); - model.paginators = (__nccwpck_require__(34864)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.ServerlessApplicationRepository; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.EndpointMode = void 0; +var EndpointMode; +(function (EndpointMode) { + EndpointMode["IPv4"] = "IPv4"; + EndpointMode["IPv6"] = "IPv6"; +})(EndpointMode = exports.EndpointMode || (exports.EndpointMode = {})); /***/ }), -/***/ 822: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 88337: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['servicecatalog'] = {}; -AWS.ServiceCatalog = Service.defineService('servicecatalog', ['2015-12-10']); -Object.defineProperty(apiLoader.services['servicecatalog'], '2015-12-10', { - get: function get() { - var model = __nccwpck_require__(95500); - model.paginators = (__nccwpck_require__(21687)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.ServiceCatalog; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ENDPOINT_MODE_CONFIG_OPTIONS = exports.CONFIG_ENDPOINT_MODE_NAME = exports.ENV_ENDPOINT_MODE_NAME = void 0; +const EndpointMode_1 = __nccwpck_require__(80991); +exports.ENV_ENDPOINT_MODE_NAME = "AWS_EC2_METADATA_SERVICE_ENDPOINT_MODE"; +exports.CONFIG_ENDPOINT_MODE_NAME = "ec2_metadata_service_endpoint_mode"; +exports.ENDPOINT_MODE_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => 
env[exports.ENV_ENDPOINT_MODE_NAME], + configFileSelector: (profile) => profile[exports.CONFIG_ENDPOINT_MODE_NAME], + default: EndpointMode_1.EndpointMode.IPv4, +}; /***/ }), -/***/ 79068: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 89227: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['servicecatalogappregistry'] = {}; -AWS.ServiceCatalogAppRegistry = Service.defineService('servicecatalogappregistry', ['2020-06-24']); -Object.defineProperty(apiLoader.services['servicecatalogappregistry'], '2020-06-24', { - get: function get() { - var model = __nccwpck_require__(25697); - model.paginators = (__nccwpck_require__(28893)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.ServiceCatalogAppRegistry; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.fromContainerMetadata = exports.ENV_CMDS_AUTH_TOKEN = exports.ENV_CMDS_RELATIVE_URI = exports.ENV_CMDS_FULL_URI = void 0; +const property_provider_1 = __nccwpck_require__(79721); +const url_1 = __nccwpck_require__(57310); +const httpRequest_1 = __nccwpck_require__(32199); +const ImdsCredentials_1 = __nccwpck_require__(6894); +const RemoteProviderInit_1 = __nccwpck_require__(98533); +const retry_1 = __nccwpck_require__(91351); +exports.ENV_CMDS_FULL_URI = "AWS_CONTAINER_CREDENTIALS_FULL_URI"; +exports.ENV_CMDS_RELATIVE_URI = "AWS_CONTAINER_CREDENTIALS_RELATIVE_URI"; +exports.ENV_CMDS_AUTH_TOKEN = "AWS_CONTAINER_AUTHORIZATION_TOKEN"; +const fromContainerMetadata = (init = {}) => { + const { timeout, maxRetries } = (0, RemoteProviderInit_1.providerConfigFromInit)(init); + return () => (0, retry_1.retry)(async () => { + const requestOptions = await getCmdsUri(); + const credsResponse = JSON.parse(await requestFromEcsImds(timeout, requestOptions)); + if (!(0, ImdsCredentials_1.isImdsCredentials)(credsResponse)) { + throw new property_provider_1.CredentialsProviderError("Invalid response received from instance metadata service."); + } + return (0, ImdsCredentials_1.fromImdsCredentials)(credsResponse); + }, maxRetries); +}; +exports.fromContainerMetadata = fromContainerMetadata; +const requestFromEcsImds = async (timeout, options) => { + if (process.env[exports.ENV_CMDS_AUTH_TOKEN]) { + options.headers = { + ...options.headers, + Authorization: process.env[exports.ENV_CMDS_AUTH_TOKEN], + }; + } + const buffer = await (0, httpRequest_1.httpRequest)({ + ...options, + timeout, + }); + return buffer.toString(); +}; +const CMDS_IP = "169.254.170.2"; +const GREENGRASS_HOSTS = { + localhost: true, + "127.0.0.1": true, +}; +const GREENGRASS_PROTOCOLS = { + "http:": true, + "https:": true, +}; +const getCmdsUri = async () => { + if (process.env[exports.ENV_CMDS_RELATIVE_URI]) { + return { + hostname: CMDS_IP, + path: process.env[exports.ENV_CMDS_RELATIVE_URI], + }; + } + if (process.env[exports.ENV_CMDS_FULL_URI]) { + const parsed = (0, url_1.parse)(process.env[exports.ENV_CMDS_FULL_URI]); + if (!parsed.hostname || !(parsed.hostname in GREENGRASS_HOSTS)) { + throw new property_provider_1.CredentialsProviderError(`${parsed.hostname} is not a valid container metadata service hostname`, false); + } + if (!parsed.protocol || !(parsed.protocol in GREENGRASS_PROTOCOLS)) { + throw new property_provider_1.CredentialsProviderError(`${parsed.protocol} is not a valid container 
metadata service protocol`, false); + } + return { + ...parsed, + port: parsed.port ? parseInt(parsed.port, 10) : undefined, + }; + } + throw new property_provider_1.CredentialsProviderError("The container metadata credential provider cannot be used unless" + + ` the ${exports.ENV_CMDS_RELATIVE_URI} or ${exports.ENV_CMDS_FULL_URI} environment` + + " variable is set", false); +}; /***/ }), -/***/ 91569: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 52207: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['servicediscovery'] = {}; -AWS.ServiceDiscovery = Service.defineService('servicediscovery', ['2017-03-14']); -Object.defineProperty(apiLoader.services['servicediscovery'], '2017-03-14', { - get: function get() { - var model = __nccwpck_require__(22361); - model.paginators = (__nccwpck_require__(37798)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.ServiceDiscovery; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.fromInstanceMetadata = void 0; +const property_provider_1 = __nccwpck_require__(79721); +const httpRequest_1 = __nccwpck_require__(32199); +const ImdsCredentials_1 = __nccwpck_require__(6894); +const RemoteProviderInit_1 = __nccwpck_require__(98533); +const retry_1 = __nccwpck_require__(91351); +const getInstanceMetadataEndpoint_1 = __nccwpck_require__(92460); +const staticStabilityProvider_1 = __nccwpck_require__(74035); +const IMDS_PATH = "/latest/meta-data/iam/security-credentials/"; +const IMDS_TOKEN_PATH = "/latest/api/token"; +const fromInstanceMetadata = (init = {}) => (0, staticStabilityProvider_1.staticStabilityProvider)(getInstanceImdsProvider(init), { logger: init.logger }); +exports.fromInstanceMetadata = fromInstanceMetadata; +const getInstanceImdsProvider = (init) => { + let disableFetchToken = false; + const { timeout, maxRetries } = (0, RemoteProviderInit_1.providerConfigFromInit)(init); + const getCredentials = async (maxRetries, options) => { + const profile = (await (0, retry_1.retry)(async () => { + let profile; + try { + profile = await getProfile(options); + } + catch (err) { + if (err.statusCode === 401) { + disableFetchToken = false; + } + throw err; + } + return profile; + }, maxRetries)).trim(); + return (0, retry_1.retry)(async () => { + let creds; + try { + creds = await getCredentialsFromProfile(profile, options); + } + catch (err) { + if (err.statusCode === 401) { + disableFetchToken = false; + } + throw err; + } + return creds; + }, maxRetries); + }; + return async () => { + const endpoint = await (0, getInstanceMetadataEndpoint_1.getInstanceMetadataEndpoint)(); + if (disableFetchToken) { + return getCredentials(maxRetries, { ...endpoint, timeout }); + } + else { + let token; + try { + token = (await getMetadataToken({ ...endpoint, timeout })).toString(); + } + catch (error) { + if ((error === null || error === void 0 ? 
void 0 : error.statusCode) === 400) { + throw Object.assign(error, { + message: "EC2 Metadata token request returned error", + }); + } + else if (error.message === "TimeoutError" || [403, 404, 405].includes(error.statusCode)) { + disableFetchToken = true; + } + return getCredentials(maxRetries, { ...endpoint, timeout }); + } + return getCredentials(maxRetries, { + ...endpoint, + headers: { + "x-aws-ec2-metadata-token": token, + }, + timeout, + }); + } + }; +}; +const getMetadataToken = async (options) => (0, httpRequest_1.httpRequest)({ + ...options, + path: IMDS_TOKEN_PATH, + method: "PUT", + headers: { + "x-aws-ec2-metadata-token-ttl-seconds": "21600", + }, +}); +const getProfile = async (options) => (await (0, httpRequest_1.httpRequest)({ ...options, path: IMDS_PATH })).toString(); +const getCredentialsFromProfile = async (profile, options) => { + const credsResponse = JSON.parse((await (0, httpRequest_1.httpRequest)({ + ...options, + path: IMDS_PATH + profile, + })).toString()); + if (!(0, ImdsCredentials_1.isImdsCredentials)(credsResponse)) { + throw new property_provider_1.CredentialsProviderError("Invalid response received from instance metadata service."); + } + return (0, ImdsCredentials_1.fromImdsCredentials)(credsResponse); +}; /***/ }), -/***/ 57800: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 7477: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['servicequotas'] = {}; -AWS.ServiceQuotas = Service.defineService('servicequotas', ['2019-06-24']); -Object.defineProperty(apiLoader.services['servicequotas'], '2019-06-24', { - get: function get() { - var model = __nccwpck_require__(68850); - model.paginators = (__nccwpck_require__(63074)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.ServiceQuotas; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getInstanceMetadataEndpoint = exports.httpRequest = void 0; +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(89227), exports); +tslib_1.__exportStar(__nccwpck_require__(52207), exports); +tslib_1.__exportStar(__nccwpck_require__(98533), exports); +tslib_1.__exportStar(__nccwpck_require__(45036), exports); +var httpRequest_1 = __nccwpck_require__(32199); +Object.defineProperty(exports, "httpRequest", ({ enumerable: true, get: function () { return httpRequest_1.httpRequest; } })); +var getInstanceMetadataEndpoint_1 = __nccwpck_require__(92460); +Object.defineProperty(exports, "getInstanceMetadataEndpoint", ({ enumerable: true, get: function () { return getInstanceMetadataEndpoint_1.getInstanceMetadataEndpoint; } })); /***/ }), -/***/ 46816: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 6894: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['ses'] = {}; -AWS.SES = Service.defineService('ses', ['2010-12-01']); -Object.defineProperty(apiLoader.services['ses'], '2010-12-01', { - get: function get() { - var model = __nccwpck_require__(56693); - model.paginators = (__nccwpck_require__(9399)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(98229)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: 
true -}); +"use strict"; -module.exports = AWS.SES; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.fromImdsCredentials = exports.isImdsCredentials = void 0; +const isImdsCredentials = (arg) => Boolean(arg) && + typeof arg === "object" && + typeof arg.AccessKeyId === "string" && + typeof arg.SecretAccessKey === "string" && + typeof arg.Token === "string" && + typeof arg.Expiration === "string"; +exports.isImdsCredentials = isImdsCredentials; +const fromImdsCredentials = (creds) => ({ + accessKeyId: creds.AccessKeyId, + secretAccessKey: creds.SecretAccessKey, + sessionToken: creds.Token, + expiration: new Date(creds.Expiration), +}); +exports.fromImdsCredentials = fromImdsCredentials; /***/ }), -/***/ 20142: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 98533: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['sesv2'] = {}; -AWS.SESV2 = Service.defineService('sesv2', ['2019-09-27']); -Object.defineProperty(apiLoader.services['sesv2'], '2019-09-27', { - get: function get() { - var model = __nccwpck_require__(69754); - model.paginators = (__nccwpck_require__(72405)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.SESV2; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.providerConfigFromInit = exports.DEFAULT_MAX_RETRIES = exports.DEFAULT_TIMEOUT = void 0; +exports.DEFAULT_TIMEOUT = 1000; +exports.DEFAULT_MAX_RETRIES = 0; +const providerConfigFromInit = ({ maxRetries = exports.DEFAULT_MAX_RETRIES, timeout = exports.DEFAULT_TIMEOUT, }) => ({ maxRetries, timeout }); +exports.providerConfigFromInit = providerConfigFromInit; /***/ }), -/***/ 20271: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 32199: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['shield'] = {}; -AWS.Shield = Service.defineService('shield', ['2016-06-02']); -Object.defineProperty(apiLoader.services['shield'], '2016-06-02', { - get: function get() { - var model = __nccwpck_require__(47061); - model.paginators = (__nccwpck_require__(54893)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Shield; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.httpRequest = void 0; +const property_provider_1 = __nccwpck_require__(79721); +const buffer_1 = __nccwpck_require__(14300); +const http_1 = __nccwpck_require__(13685); +function httpRequest(options) { + return new Promise((resolve, reject) => { + var _a; + const req = (0, http_1.request)({ + method: "GET", + ...options, + hostname: (_a = options.hostname) === null || _a === void 0 ? 
void 0 : _a.replace(/^\[(.+)\]$/, "$1"), + }); + req.on("error", (err) => { + reject(Object.assign(new property_provider_1.ProviderError("Unable to connect to instance metadata service"), err)); + req.destroy(); + }); + req.on("timeout", () => { + reject(new property_provider_1.ProviderError("TimeoutError from instance metadata service")); + req.destroy(); + }); + req.on("response", (res) => { + const { statusCode = 400 } = res; + if (statusCode < 200 || 300 <= statusCode) { + reject(Object.assign(new property_provider_1.ProviderError("Error response received from instance metadata service"), { statusCode })); + req.destroy(); + } + const chunks = []; + res.on("data", (chunk) => { + chunks.push(chunk); + }); + res.on("end", () => { + resolve(buffer_1.Buffer.concat(chunks)); + req.destroy(); + }); + }); + req.end(); + }); +} +exports.httpRequest = httpRequest; /***/ }), -/***/ 71596: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 91351: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['signer'] = {}; -AWS.Signer = Service.defineService('signer', ['2017-08-25']); -Object.defineProperty(apiLoader.services['signer'], '2017-08-25', { - get: function get() { - var model = __nccwpck_require__(97116); - model.paginators = (__nccwpck_require__(81027)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(48215)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Signer; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.retry = void 0; +const retry = (toRetry, maxRetries) => { + let promise = toRetry(); + for (let i = 0; i < maxRetries; i++) { + promise = promise.catch(toRetry); + } + return promise; +}; +exports.retry = retry; /***/ }), -/***/ 10120: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 45036: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['simpledb'] = {}; -AWS.SimpleDB = Service.defineService('simpledb', ['2009-04-15']); -Object.defineProperty(apiLoader.services['simpledb'], '2009-04-15', { - get: function get() { - var model = __nccwpck_require__(45164); - model.paginators = (__nccwpck_require__(55255)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.SimpleDB; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 37090: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 22666: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['simspaceweaver'] = {}; -AWS.SimSpaceWeaver = Service.defineService('simspaceweaver', ['2022-10-28']); -Object.defineProperty(apiLoader.services['simspaceweaver'], '2022-10-28', { - get: function get() { - var model = __nccwpck_require__(92139); - model.paginators = (__nccwpck_require__(31849)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.SimSpaceWeaver; +Object.defineProperty(exports, "__esModule", ({ value: true })); 
+exports.getExtendedInstanceMetadataCredentials = void 0; +const STATIC_STABILITY_REFRESH_INTERVAL_SECONDS = 5 * 60; +const STATIC_STABILITY_REFRESH_INTERVAL_JITTER_WINDOW_SECONDS = 5 * 60; +const STATIC_STABILITY_DOC_URL = "https://docs.aws.amazon.com/sdkref/latest/guide/feature-static-credentials.html"; +const getExtendedInstanceMetadataCredentials = (credentials, logger) => { + var _a; + const refreshInterval = STATIC_STABILITY_REFRESH_INTERVAL_SECONDS + + Math.floor(Math.random() * STATIC_STABILITY_REFRESH_INTERVAL_JITTER_WINDOW_SECONDS); + const newExpiration = new Date(Date.now() + refreshInterval * 1000); + logger.warn("Attempting credential expiration extension due to a credential service availability issue. A refresh of these " + + "credentials will be attempted after ${new Date(newExpiration)}.\nFor more information, please visit: " + + STATIC_STABILITY_DOC_URL); + const originalExpiration = (_a = credentials.originalExpiration) !== null && _a !== void 0 ? _a : credentials.expiration; + return { + ...credentials, + ...(originalExpiration ? { originalExpiration } : {}), + expiration: newExpiration, + }; +}; +exports.getExtendedInstanceMetadataCredentials = getExtendedInstanceMetadataCredentials; /***/ }), -/***/ 57719: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 92460: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['sms'] = {}; -AWS.SMS = Service.defineService('sms', ['2016-10-24']); -Object.defineProperty(apiLoader.services['sms'], '2016-10-24', { - get: function get() { - var model = __nccwpck_require__(26534); - model.paginators = (__nccwpck_require__(98730)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.SMS; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getInstanceMetadataEndpoint = void 0; +const node_config_provider_1 = __nccwpck_require__(33461); +const url_parser_1 = __nccwpck_require__(14681); +const Endpoint_1 = __nccwpck_require__(18044); +const EndpointConfigOptions_1 = __nccwpck_require__(57342); +const EndpointMode_1 = __nccwpck_require__(80991); +const EndpointModeConfigOptions_1 = __nccwpck_require__(88337); +const getInstanceMetadataEndpoint = async () => (0, url_parser_1.parseUrl)((await getFromEndpointConfig()) || (await getFromEndpointModeConfig())); +exports.getInstanceMetadataEndpoint = getInstanceMetadataEndpoint; +const getFromEndpointConfig = async () => (0, node_config_provider_1.loadConfig)(EndpointConfigOptions_1.ENDPOINT_CONFIG_OPTIONS)(); +const getFromEndpointModeConfig = async () => { + const endpointMode = await (0, node_config_provider_1.loadConfig)(EndpointModeConfigOptions_1.ENDPOINT_MODE_CONFIG_OPTIONS)(); + switch (endpointMode) { + case EndpointMode_1.EndpointMode.IPv4: + return Endpoint_1.Endpoint.IPv4; + case EndpointMode_1.EndpointMode.IPv6: + return Endpoint_1.Endpoint.IPv6; + default: + throw new Error(`Unsupported endpoint mode: ${endpointMode}.` + ` Select from ${Object.values(EndpointMode_1.EndpointMode)}`); + } +}; /***/ }), -/***/ 510: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 74035: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - 
-apiLoader.services['snowball'] = {}; -AWS.Snowball = Service.defineService('snowball', ['2016-06-30']); -Object.defineProperty(apiLoader.services['snowball'], '2016-06-30', { - get: function get() { - var model = __nccwpck_require__(96822); - model.paginators = (__nccwpck_require__(45219)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Snowball; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.staticStabilityProvider = void 0; +const getExtendedInstanceMetadataCredentials_1 = __nccwpck_require__(22666); +const staticStabilityProvider = (provider, options = {}) => { + const logger = (options === null || options === void 0 ? void 0 : options.logger) || console; + let pastCredentials; + return async () => { + let credentials; + try { + credentials = await provider(); + if (credentials.expiration && credentials.expiration.getTime() < Date.now()) { + credentials = (0, getExtendedInstanceMetadataCredentials_1.getExtendedInstanceMetadataCredentials)(credentials, logger); + } + } + catch (e) { + if (pastCredentials) { + logger.warn("Credential renew failed: ", e); + credentials = (0, getExtendedInstanceMetadataCredentials_1.getExtendedInstanceMetadataCredentials)(pastCredentials, logger); + } + else { + throw e; + } + } + pastCredentials = credentials; + return credentials; + }; +}; +exports.staticStabilityProvider = staticStabilityProvider; /***/ }), -/***/ 64655: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 11014: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['snowdevicemanagement'] = {}; -AWS.SnowDeviceManagement = Service.defineService('snowdevicemanagement', ['2021-08-04']); -Object.defineProperty(apiLoader.services['snowdevicemanagement'], '2021-08-04', { - get: function get() { - var model = __nccwpck_require__(97413); - model.paginators = (__nccwpck_require__(70424)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.SnowDeviceManagement; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.EventStreamCodec = void 0; +const crc32_1 = __nccwpck_require__(47327); +const HeaderMarshaller_1 = __nccwpck_require__(74712); +const splitMessage_1 = __nccwpck_require__(20597); +class EventStreamCodec { + constructor(toUtf8, fromUtf8) { + this.headerMarshaller = new HeaderMarshaller_1.HeaderMarshaller(toUtf8, fromUtf8); + this.messageBuffer = []; + this.isEndOfStream = false; + } + feed(message) { + this.messageBuffer.push(this.decode(message)); + } + endOfStream() { + this.isEndOfStream = true; + } + getMessage() { + const message = this.messageBuffer.pop(); + const isEndOfStream = this.isEndOfStream; + return { + getMessage() { + return message; + }, + isEndOfStream() { + return isEndOfStream; + }, + }; + } + getAvailableMessages() { + const messages = this.messageBuffer; + this.messageBuffer = []; + const isEndOfStream = this.isEndOfStream; + return { + getMessages() { + return messages; + }, + isEndOfStream() { + return isEndOfStream; + }, + }; + } + encode({ headers: rawHeaders, body }) { + const headers = this.headerMarshaller.format(rawHeaders); + const length = headers.byteLength + body.byteLength + 16; + const out = new Uint8Array(length); + const view = new DataView(out.buffer, out.byteOffset, 
out.byteLength); + const checksum = new crc32_1.Crc32(); + view.setUint32(0, length, false); + view.setUint32(4, headers.byteLength, false); + view.setUint32(8, checksum.update(out.subarray(0, 8)).digest(), false); + out.set(headers, 12); + out.set(body, headers.byteLength + 12); + view.setUint32(length - 4, checksum.update(out.subarray(8, length - 4)).digest(), false); + return out; + } + decode(message) { + const { headers, body } = (0, splitMessage_1.splitMessage)(message); + return { headers: this.headerMarshaller.parse(headers), body }; + } + formatHeaders(rawHeaders) { + return this.headerMarshaller.format(rawHeaders); + } +} +exports.EventStreamCodec = EventStreamCodec; /***/ }), -/***/ 28581: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['sns'] = {}; -AWS.SNS = Service.defineService('sns', ['2010-03-31']); -Object.defineProperty(apiLoader.services['sns'], '2010-03-31', { - get: function get() { - var model = __nccwpck_require__(64387); - model.paginators = (__nccwpck_require__(58054)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); - -module.exports = AWS.SNS; - +/***/ 74712: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -/***/ }), +"use strict"; -/***/ 63172: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.HeaderMarshaller = void 0; +const util_hex_encoding_1 = __nccwpck_require__(45364); +const Int64_1 = __nccwpck_require__(46086); +class HeaderMarshaller { + constructor(toUtf8, fromUtf8) { + this.toUtf8 = toUtf8; + this.fromUtf8 = fromUtf8; + } + format(headers) { + const chunks = []; + for (const headerName of Object.keys(headers)) { + const bytes = this.fromUtf8(headerName); + chunks.push(Uint8Array.from([bytes.byteLength]), bytes, this.formatHeaderValue(headers[headerName])); + } + const out = new Uint8Array(chunks.reduce((carry, bytes) => carry + bytes.byteLength, 0)); + let position = 0; + for (const chunk of chunks) { + out.set(chunk, position); + position += chunk.byteLength; + } + return out; + } + formatHeaderValue(header) { + switch (header.type) { + case "boolean": + return Uint8Array.from([header.value ? 
0 : 1]); + case "byte": + return Uint8Array.from([2, header.value]); + case "short": + const shortView = new DataView(new ArrayBuffer(3)); + shortView.setUint8(0, 3); + shortView.setInt16(1, header.value, false); + return new Uint8Array(shortView.buffer); + case "integer": + const intView = new DataView(new ArrayBuffer(5)); + intView.setUint8(0, 4); + intView.setInt32(1, header.value, false); + return new Uint8Array(intView.buffer); + case "long": + const longBytes = new Uint8Array(9); + longBytes[0] = 5; + longBytes.set(header.value.bytes, 1); + return longBytes; + case "binary": + const binView = new DataView(new ArrayBuffer(3 + header.value.byteLength)); + binView.setUint8(0, 6); + binView.setUint16(1, header.value.byteLength, false); + const binBytes = new Uint8Array(binView.buffer); + binBytes.set(header.value, 3); + return binBytes; + case "string": + const utf8Bytes = this.fromUtf8(header.value); + const strView = new DataView(new ArrayBuffer(3 + utf8Bytes.byteLength)); + strView.setUint8(0, 7); + strView.setUint16(1, utf8Bytes.byteLength, false); + const strBytes = new Uint8Array(strView.buffer); + strBytes.set(utf8Bytes, 3); + return strBytes; + case "timestamp": + const tsBytes = new Uint8Array(9); + tsBytes[0] = 8; + tsBytes.set(Int64_1.Int64.fromNumber(header.value.valueOf()).bytes, 1); + return tsBytes; + case "uuid": + if (!UUID_PATTERN.test(header.value)) { + throw new Error(`Invalid UUID received: ${header.value}`); + } + const uuidBytes = new Uint8Array(17); + uuidBytes[0] = 9; + uuidBytes.set((0, util_hex_encoding_1.fromHex)(header.value.replace(/\-/g, "")), 1); + return uuidBytes; + } + } + parse(headers) { + const out = {}; + let position = 0; + while (position < headers.byteLength) { + const nameLength = headers.getUint8(position++); + const name = this.toUtf8(new Uint8Array(headers.buffer, headers.byteOffset + position, nameLength)); + position += nameLength; + switch (headers.getUint8(position++)) { + case 0: + out[name] = { + type: BOOLEAN_TAG, + value: true, + }; + break; + case 1: + out[name] = { + type: BOOLEAN_TAG, + value: false, + }; + break; + case 2: + out[name] = { + type: BYTE_TAG, + value: headers.getInt8(position++), + }; + break; + case 3: + out[name] = { + type: SHORT_TAG, + value: headers.getInt16(position, false), + }; + position += 2; + break; + case 4: + out[name] = { + type: INT_TAG, + value: headers.getInt32(position, false), + }; + position += 4; + break; + case 5: + out[name] = { + type: LONG_TAG, + value: new Int64_1.Int64(new Uint8Array(headers.buffer, headers.byteOffset + position, 8)), + }; + position += 8; + break; + case 6: + const binaryLength = headers.getUint16(position, false); + position += 2; + out[name] = { + type: BINARY_TAG, + value: new Uint8Array(headers.buffer, headers.byteOffset + position, binaryLength), + }; + position += binaryLength; + break; + case 7: + const stringLength = headers.getUint16(position, false); + position += 2; + out[name] = { + type: STRING_TAG, + value: this.toUtf8(new Uint8Array(headers.buffer, headers.byteOffset + position, stringLength)), + }; + position += stringLength; + break; + case 8: + out[name] = { + type: TIMESTAMP_TAG, + value: new Date(new Int64_1.Int64(new Uint8Array(headers.buffer, headers.byteOffset + position, 8)).valueOf()), + }; + position += 8; + break; + case 9: + const uuidBytes = new Uint8Array(headers.buffer, headers.byteOffset + position, 16); + position += 16; + out[name] = { + type: UUID_TAG, + value: `${(0, util_hex_encoding_1.toHex)(uuidBytes.subarray(0, 4))}-${(0, 
util_hex_encoding_1.toHex)(uuidBytes.subarray(4, 6))}-${(0, util_hex_encoding_1.toHex)(uuidBytes.subarray(6, 8))}-${(0, util_hex_encoding_1.toHex)(uuidBytes.subarray(8, 10))}-${(0, util_hex_encoding_1.toHex)(uuidBytes.subarray(10))}`, + }; + break; + default: + throw new Error(`Unrecognized header type tag`); + } + } + return out; + } +} +exports.HeaderMarshaller = HeaderMarshaller; +var HEADER_VALUE_TYPE; +(function (HEADER_VALUE_TYPE) { + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["boolTrue"] = 0] = "boolTrue"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["boolFalse"] = 1] = "boolFalse"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["byte"] = 2] = "byte"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["short"] = 3] = "short"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["integer"] = 4] = "integer"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["long"] = 5] = "long"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["byteArray"] = 6] = "byteArray"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["string"] = 7] = "string"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["timestamp"] = 8] = "timestamp"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["uuid"] = 9] = "uuid"; +})(HEADER_VALUE_TYPE || (HEADER_VALUE_TYPE = {})); +const BOOLEAN_TAG = "boolean"; +const BYTE_TAG = "byte"; +const SHORT_TAG = "short"; +const INT_TAG = "integer"; +const LONG_TAG = "long"; +const BINARY_TAG = "binary"; +const STRING_TAG = "string"; +const TIMESTAMP_TAG = "timestamp"; +const UUID_TAG = "uuid"; +const UUID_PATTERN = /^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$/; + + +/***/ }), + +/***/ 46086: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['sqs'] = {}; -AWS.SQS = Service.defineService('sqs', ['2012-11-05']); -__nccwpck_require__(94571); -Object.defineProperty(apiLoader.services['sqs'], '2012-11-05', { - get: function get() { - var model = __nccwpck_require__(53974); - model.paginators = (__nccwpck_require__(17249)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.SQS; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Int64 = void 0; +const util_hex_encoding_1 = __nccwpck_require__(45364); +class Int64 { + constructor(bytes) { + this.bytes = bytes; + if (bytes.byteLength !== 8) { + throw new Error("Int64 buffers must be exactly 8 bytes"); + } + } + static fromNumber(number) { + if (number > 9223372036854776000 || number < -9223372036854776000) { + throw new Error(`${number} is too large (or, if negative, too small) to represent as an Int64`); + } + const bytes = new Uint8Array(8); + for (let i = 7, remaining = Math.abs(Math.round(number)); i > -1 && remaining > 0; i--, remaining /= 256) { + bytes[i] = remaining; + } + if (number < 0) { + negate(bytes); + } + return new Int64(bytes); + } + valueOf() { + const bytes = this.bytes.slice(0); + const negative = bytes[0] & 0b10000000; + if (negative) { + negate(bytes); + } + return parseInt((0, util_hex_encoding_1.toHex)(bytes), 16) * (negative ? 
-1 : 1); + } + toString() { + return String(this.valueOf()); + } +} +exports.Int64 = Int64; +function negate(bytes) { + for (let i = 0; i < 8; i++) { + bytes[i] ^= 0xff; + } + for (let i = 7; i > -1; i--) { + bytes[i]++; + if (bytes[i] !== 0) + break; + } +} /***/ }), -/***/ 83380: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 73684: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['ssm'] = {}; -AWS.SSM = Service.defineService('ssm', ['2014-11-06']); -Object.defineProperty(apiLoader.services['ssm'], '2014-11-06', { - get: function get() { - var model = __nccwpck_require__(44596); - model.paginators = (__nccwpck_require__(5135)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(98523)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.SSM; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 12577: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 57255: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['ssmcontacts'] = {}; -AWS.SSMContacts = Service.defineService('ssmcontacts', ['2021-05-03']); -Object.defineProperty(apiLoader.services['ssmcontacts'], '2021-05-03', { - get: function get() { - var model = __nccwpck_require__(74831); - model.paginators = (__nccwpck_require__(63938)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.SSMContacts; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.MessageDecoderStream = void 0; +class MessageDecoderStream { + constructor(options) { + this.options = options; + } + [Symbol.asyncIterator]() { + return this.asyncIterator(); + } + async *asyncIterator() { + for await (const bytes of this.options.inputStream) { + const decoded = this.options.decoder.decode(bytes); + yield decoded; + } + } +} +exports.MessageDecoderStream = MessageDecoderStream; /***/ }), -/***/ 20590: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 52362: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['ssmincidents'] = {}; -AWS.SSMIncidents = Service.defineService('ssmincidents', ['2018-05-10']); -Object.defineProperty(apiLoader.services['ssmincidents'], '2018-05-10', { - get: function get() { - var model = __nccwpck_require__(18719); - model.paginators = (__nccwpck_require__(4502)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(97755)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.SSMIncidents; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.MessageEncoderStream = void 0; +class MessageEncoderStream { + constructor(options) { + this.options = options; + } + [Symbol.asyncIterator]() { + return this.asyncIterator(); + } + async *asyncIterator() { + for await (const msg of this.options.messageStream) { + const encoded = this.options.encoder.encode(msg); + yield encoded; + } + if (this.options.includeEndFrame) { + yield new Uint8Array(0); + } + 
} +} +exports.MessageEncoderStream = MessageEncoderStream; /***/ }), -/***/ 44552: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 62379: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['ssmsap'] = {}; -AWS.SsmSap = Service.defineService('ssmsap', ['2018-05-10']); -Object.defineProperty(apiLoader.services['ssmsap'], '2018-05-10', { - get: function get() { - var model = __nccwpck_require__(49218); - model.paginators = (__nccwpck_require__(94718)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.SsmSap; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.SmithyMessageDecoderStream = void 0; +class SmithyMessageDecoderStream { + constructor(options) { + this.options = options; + } + [Symbol.asyncIterator]() { + return this.asyncIterator(); + } + async *asyncIterator() { + for await (const message of this.options.messageStream) { + const deserialized = await this.options.deserializer(message); + if (deserialized === undefined) + continue; + yield deserialized; + } + } +} +exports.SmithyMessageDecoderStream = SmithyMessageDecoderStream; /***/ }), -/***/ 71096: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 12484: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['sso'] = {}; -AWS.SSO = Service.defineService('sso', ['2019-06-10']); -Object.defineProperty(apiLoader.services['sso'], '2019-06-10', { - get: function get() { - var model = __nccwpck_require__(8027); - model.paginators = (__nccwpck_require__(36610)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.SSO; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.SmithyMessageEncoderStream = void 0; +class SmithyMessageEncoderStream { + constructor(options) { + this.options = options; + } + [Symbol.asyncIterator]() { + return this.asyncIterator(); + } + async *asyncIterator() { + for await (const chunk of this.options.inputStream) { + const payloadBuf = this.options.serializer(chunk); + yield payloadBuf; + } + } +} +exports.SmithyMessageEncoderStream = SmithyMessageEncoderStream; /***/ }), -/***/ 66644: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 56459: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['ssoadmin'] = {}; -AWS.SSOAdmin = Service.defineService('ssoadmin', ['2020-07-20']); -Object.defineProperty(apiLoader.services['ssoadmin'], '2020-07-20', { - get: function get() { - var model = __nccwpck_require__(7239); - model.paginators = (__nccwpck_require__(49402)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.SSOAdmin; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(11014), exports); +tslib_1.__exportStar(__nccwpck_require__(74712), exports); +tslib_1.__exportStar(__nccwpck_require__(46086), exports); 
+tslib_1.__exportStar(__nccwpck_require__(73684), exports); +tslib_1.__exportStar(__nccwpck_require__(57255), exports); +tslib_1.__exportStar(__nccwpck_require__(52362), exports); +tslib_1.__exportStar(__nccwpck_require__(62379), exports); +tslib_1.__exportStar(__nccwpck_require__(12484), exports); /***/ }), -/***/ 49870: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 20597: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['ssooidc'] = {}; -AWS.SSOOIDC = Service.defineService('ssooidc', ['2019-06-10']); -Object.defineProperty(apiLoader.services['ssooidc'], '2019-06-10', { - get: function get() { - var model = __nccwpck_require__(62343); - model.paginators = (__nccwpck_require__(50215)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.SSOOIDC; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.splitMessage = void 0; +const crc32_1 = __nccwpck_require__(47327); +const PRELUDE_MEMBER_LENGTH = 4; +const PRELUDE_LENGTH = PRELUDE_MEMBER_LENGTH * 2; +const CHECKSUM_LENGTH = 4; +const MINIMUM_MESSAGE_LENGTH = PRELUDE_LENGTH + CHECKSUM_LENGTH * 2; +function splitMessage({ byteLength, byteOffset, buffer }) { + if (byteLength < MINIMUM_MESSAGE_LENGTH) { + throw new Error("Provided message too short to accommodate event stream message overhead"); + } + const view = new DataView(buffer, byteOffset, byteLength); + const messageLength = view.getUint32(0, false); + if (byteLength !== messageLength) { + throw new Error("Reported message length does not match received message length"); + } + const headerLength = view.getUint32(PRELUDE_MEMBER_LENGTH, false); + const expectedPreludeChecksum = view.getUint32(PRELUDE_LENGTH, false); + const expectedMessageChecksum = view.getUint32(byteLength - CHECKSUM_LENGTH, false); + const checksummer = new crc32_1.Crc32().update(new Uint8Array(buffer, byteOffset, PRELUDE_LENGTH)); + if (expectedPreludeChecksum !== checksummer.digest()) { + throw new Error(`The prelude checksum specified in the message (${expectedPreludeChecksum}) does not match the calculated CRC32 checksum (${checksummer.digest()})`); + } + checksummer.update(new Uint8Array(buffer, byteOffset + PRELUDE_LENGTH, byteLength - (PRELUDE_LENGTH + CHECKSUM_LENGTH))); + if (expectedMessageChecksum !== checksummer.digest()) { + throw new Error(`The message checksum (${checksummer.digest()}) did not match the expected value of ${expectedMessageChecksum}`); + } + return { + headers: new DataView(buffer, byteOffset + PRELUDE_LENGTH + CHECKSUM_LENGTH, headerLength), + body: new Uint8Array(buffer, byteOffset + PRELUDE_LENGTH + CHECKSUM_LENGTH + headerLength, messageLength - headerLength - (PRELUDE_LENGTH + CHECKSUM_LENGTH + CHECKSUM_LENGTH)), + }; +} +exports.splitMessage = splitMessage; /***/ }), -/***/ 8136: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 3081: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['stepfunctions'] = {}; -AWS.StepFunctions = Service.defineService('stepfunctions', ['2016-11-23']); -Object.defineProperty(apiLoader.services['stepfunctions'], '2016-11-23', { - get: function get() { - var model = 
__nccwpck_require__(85693); - model.paginators = (__nccwpck_require__(24818)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.StepFunctions; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Hash = void 0; +const util_buffer_from_1 = __nccwpck_require__(31381); +const util_utf8_1 = __nccwpck_require__(41895); +const buffer_1 = __nccwpck_require__(14300); +const crypto_1 = __nccwpck_require__(6113); +class Hash { + constructor(algorithmIdentifier, secret) { + this.algorithmIdentifier = algorithmIdentifier; + this.secret = secret; + this.reset(); + } + update(toHash, encoding) { + this.hash.update((0, util_utf8_1.toUint8Array)(castSourceData(toHash, encoding))); + } + digest() { + return Promise.resolve(this.hash.digest()); + } + reset() { + this.hash = this.secret + ? (0, crypto_1.createHmac)(this.algorithmIdentifier, castSourceData(this.secret)) + : (0, crypto_1.createHash)(this.algorithmIdentifier); + } +} +exports.Hash = Hash; +function castSourceData(toCast, encoding) { + if (buffer_1.Buffer.isBuffer(toCast)) { + return toCast; + } + if (typeof toCast === "string") { + return (0, util_buffer_from_1.fromString)(toCast, encoding); + } + if (ArrayBuffer.isView(toCast)) { + return (0, util_buffer_from_1.fromArrayBuffer)(toCast.buffer, toCast.byteOffset, toCast.byteLength); + } + return (0, util_buffer_from_1.fromArrayBuffer)(toCast); +} /***/ }), -/***/ 89190: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 10780: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['storagegateway'] = {}; -AWS.StorageGateway = Service.defineService('storagegateway', ['2013-06-30']); -Object.defineProperty(apiLoader.services['storagegateway'], '2013-06-30', { - get: function get() { - var model = __nccwpck_require__(11069); - model.paginators = (__nccwpck_require__(33999)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.StorageGateway; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isArrayBuffer = void 0; +const isArrayBuffer = (arg) => (typeof ArrayBuffer === "function" && arg instanceof ArrayBuffer) || + Object.prototype.toString.call(arg) === "[object ArrayBuffer]"; +exports.isArrayBuffer = isArrayBuffer; /***/ }), -/***/ 57513: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['sts'] = {}; -AWS.STS = Service.defineService('sts', ['2011-06-15']); -__nccwpck_require__(91055); -Object.defineProperty(apiLoader.services['sts'], '2011-06-15', { - get: function get() { - var model = __nccwpck_require__(80753); - model.paginators = (__nccwpck_require__(93639)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); - -module.exports = AWS.STS; - - -/***/ }), +/***/ 82800: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -/***/ 1099: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +"use strict"; -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['support'] = {}; -AWS.Support = 
Service.defineService('support', ['2013-04-15']); -Object.defineProperty(apiLoader.services['support'], '2013-04-15', { - get: function get() { - var model = __nccwpck_require__(20767); - model.paginators = (__nccwpck_require__(62491)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getContentLengthPlugin = exports.contentLengthMiddlewareOptions = exports.contentLengthMiddleware = void 0; +const protocol_http_1 = __nccwpck_require__(64418); +const CONTENT_LENGTH_HEADER = "content-length"; +function contentLengthMiddleware(bodyLengthChecker) { + return (next) => async (args) => { + const request = args.request; + if (protocol_http_1.HttpRequest.isInstance(request)) { + const { body, headers } = request; + if (body && + Object.keys(headers) + .map((str) => str.toLowerCase()) + .indexOf(CONTENT_LENGTH_HEADER) === -1) { + try { + const length = bodyLengthChecker(body); + request.headers = { + ...request.headers, + [CONTENT_LENGTH_HEADER]: String(length), + }; + } + catch (error) { + } + } + } + return next({ + ...args, + request, + }); + }; +} +exports.contentLengthMiddleware = contentLengthMiddleware; +exports.contentLengthMiddlewareOptions = { + step: "build", + tags: ["SET_CONTENT_LENGTH", "CONTENT_LENGTH"], + name: "contentLengthMiddleware", + override: true, +}; +const getContentLengthPlugin = (options) => ({ + applyToStack: (clientStack) => { + clientStack.add(contentLengthMiddleware(options.bodyLengthChecker), exports.contentLengthMiddlewareOptions); + }, }); - -module.exports = AWS.Support; +exports.getContentLengthPlugin = getContentLengthPlugin; /***/ }), -/***/ 51288: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 465: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['supportapp'] = {}; -AWS.SupportApp = Service.defineService('supportapp', ['2021-08-20']); -Object.defineProperty(apiLoader.services['supportapp'], '2021-08-20', { - get: function get() { - var model = __nccwpck_require__(94851); - model.paginators = (__nccwpck_require__(60546)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.SupportApp; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createConfigValueProvider = void 0; +const createConfigValueProvider = (configKey, canonicalEndpointParamKey, config) => { + const configProvider = async () => { + var _a; + const configValue = (_a = config[configKey]) !== null && _a !== void 0 ? _a : config[canonicalEndpointParamKey]; + if (typeof configValue === "function") { + return configValue(); + } + return configValue; + }; + if (configKey === "endpoint" || canonicalEndpointParamKey === "endpoint") { + return async () => { + const endpoint = await configProvider(); + if (endpoint && typeof endpoint === "object") { + if ("url" in endpoint) { + return endpoint.url.href; + } + if ("hostname" in endpoint) { + const { protocol, hostname, port, path } = endpoint; + return `${protocol}//${hostname}${port ? 
":" + port : ""}${path}`; + } + } + return endpoint; + }; + } + return configProvider; +}; +exports.createConfigValueProvider = createConfigValueProvider; /***/ }), -/***/ 32327: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 73929: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['swf'] = {}; -AWS.SWF = Service.defineService('swf', ['2012-01-25']); -__nccwpck_require__(31987); -Object.defineProperty(apiLoader.services['swf'], '2012-01-25', { - get: function get() { - var model = __nccwpck_require__(11144); - model.paginators = (__nccwpck_require__(48039)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.SWF; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveParams = exports.getEndpointFromInstructions = void 0; +const service_customizations_1 = __nccwpck_require__(13105); +const createConfigValueProvider_1 = __nccwpck_require__(465); +const getEndpointFromInstructions = async (commandInput, instructionsSupplier, clientConfig, context) => { + const endpointParams = await (0, exports.resolveParams)(commandInput, instructionsSupplier, clientConfig); + if (typeof clientConfig.endpointProvider !== "function") { + throw new Error("config.endpointProvider is not set."); + } + const endpoint = clientConfig.endpointProvider(endpointParams, context); + return endpoint; +}; +exports.getEndpointFromInstructions = getEndpointFromInstructions; +const resolveParams = async (commandInput, instructionsSupplier, clientConfig) => { + var _a; + const endpointParams = {}; + const instructions = ((_a = instructionsSupplier === null || instructionsSupplier === void 0 ? void 0 : instructionsSupplier.getEndpointParameterInstructions) === null || _a === void 0 ? 
void 0 : _a.call(instructionsSupplier)) || {}; + for (const [name, instruction] of Object.entries(instructions)) { + switch (instruction.type) { + case "staticContextParams": + endpointParams[name] = instruction.value; + break; + case "contextParams": + endpointParams[name] = commandInput[instruction.name]; + break; + case "clientContextParams": + case "builtInParams": + endpointParams[name] = await (0, createConfigValueProvider_1.createConfigValueProvider)(instruction.name, name, clientConfig)(); + break; + default: + throw new Error("Unrecognized endpoint parameter instruction: " + JSON.stringify(instruction)); + } + } + if (Object.keys(instructions).length === 0) { + Object.assign(endpointParams, clientConfig); + } + if (String(clientConfig.serviceId).toLowerCase() === "s3") { + await (0, service_customizations_1.resolveParamsForS3)(endpointParams); + } + return endpointParams; +}; +exports.resolveParams = resolveParams; /***/ }), -/***/ 25910: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 50890: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['synthetics'] = {}; -AWS.Synthetics = Service.defineService('synthetics', ['2017-10-11']); -Object.defineProperty(apiLoader.services['synthetics'], '2017-10-11', { - get: function get() { - var model = __nccwpck_require__(78752); - model.paginators = (__nccwpck_require__(61615)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Synthetics; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(73929), exports); +tslib_1.__exportStar(__nccwpck_require__(38938), exports); /***/ }), -/***/ 58523: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 38938: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['textract'] = {}; -AWS.Textract = Service.defineService('textract', ['2018-06-27']); -Object.defineProperty(apiLoader.services['textract'], '2018-06-27', { - get: function get() { - var model = __nccwpck_require__(49753); - model.paginators = (__nccwpck_require__(16270)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Textract; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.toEndpointV1 = void 0; +const url_parser_1 = __nccwpck_require__(14681); +const toEndpointV1 = (endpoint) => { + if (typeof endpoint === "object") { + if ("url" in endpoint) { + return (0, url_parser_1.parseUrl)(endpoint.url); + } + return endpoint; + } + return (0, url_parser_1.parseUrl)(endpoint); +}; +exports.toEndpointV1 = toEndpointV1; /***/ }), -/***/ 24529: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 55520: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['timestreamquery'] = {}; -AWS.TimestreamQuery = Service.defineService('timestreamquery', ['2018-11-01']); 
-Object.defineProperty(apiLoader.services['timestreamquery'], '2018-11-01', { - get: function get() { - var model = __nccwpck_require__(70457); - model.paginators = (__nccwpck_require__(97217)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.TimestreamQuery; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.endpointMiddleware = void 0; +const getEndpointFromInstructions_1 = __nccwpck_require__(73929); +const endpointMiddleware = ({ config, instructions, }) => { + return (next, context) => async (args) => { + var _a, _b; + const endpoint = await (0, getEndpointFromInstructions_1.getEndpointFromInstructions)(args.input, { + getEndpointParameterInstructions() { + return instructions; + }, + }, { ...config }, context); + context.endpointV2 = endpoint; + context.authSchemes = (_a = endpoint.properties) === null || _a === void 0 ? void 0 : _a.authSchemes; + const authScheme = (_b = context.authSchemes) === null || _b === void 0 ? void 0 : _b[0]; + if (authScheme) { + context["signing_region"] = authScheme.signingRegion; + context["signing_service"] = authScheme.signingName; + } + return next({ + ...args, + }); + }; +}; +exports.endpointMiddleware = endpointMiddleware; /***/ }), -/***/ 1573: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 71329: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['timestreamwrite'] = {}; -AWS.TimestreamWrite = Service.defineService('timestreamwrite', ['2018-11-01']); -Object.defineProperty(apiLoader.services['timestreamwrite'], '2018-11-01', { - get: function get() { - var model = __nccwpck_require__(8368); - model.paginators = (__nccwpck_require__(89653)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.TimestreamWrite; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getEndpointPlugin = exports.endpointMiddlewareOptions = void 0; +const middleware_serde_1 = __nccwpck_require__(81238); +const endpointMiddleware_1 = __nccwpck_require__(55520); +exports.endpointMiddlewareOptions = { + step: "serialize", + tags: ["ENDPOINT_PARAMETERS", "ENDPOINT_V2", "ENDPOINT"], + name: "endpointV2Middleware", + override: true, + relation: "before", + toMiddleware: middleware_serde_1.serializerMiddlewareOption.name, +}; +const getEndpointPlugin = (config, instructions) => ({ + applyToStack: (clientStack) => { + clientStack.addRelativeTo((0, endpointMiddleware_1.endpointMiddleware)({ + config, + instructions, + }), exports.endpointMiddlewareOptions); + }, +}); +exports.getEndpointPlugin = getEndpointPlugin; /***/ }), -/***/ 15300: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 82918: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['tnb'] = {}; -AWS.Tnb = Service.defineService('tnb', ['2008-10-21']); -Object.defineProperty(apiLoader.services['tnb'], '2008-10-21', { - get: function get() { - var model = __nccwpck_require__(1433); - model.paginators = (__nccwpck_require__(55995)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports 
= AWS.Tnb; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(50890), exports); +tslib_1.__exportStar(__nccwpck_require__(55520), exports); +tslib_1.__exportStar(__nccwpck_require__(71329), exports); +tslib_1.__exportStar(__nccwpck_require__(74139), exports); +tslib_1.__exportStar(__nccwpck_require__(39720), exports); /***/ }), -/***/ 75811: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 74139: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['transcribeservice'] = {}; -AWS.TranscribeService = Service.defineService('transcribeservice', ['2017-10-26']); -Object.defineProperty(apiLoader.services['transcribeservice'], '2017-10-26', { - get: function get() { - var model = __nccwpck_require__(47294); - model.paginators = (__nccwpck_require__(25395)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.TranscribeService; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveEndpointConfig = void 0; +const util_middleware_1 = __nccwpck_require__(2390); +const toEndpointV1_1 = __nccwpck_require__(38938); +const resolveEndpointConfig = (input) => { + var _a, _b, _c; + const tls = (_a = input.tls) !== null && _a !== void 0 ? _a : true; + const { endpoint } = input; + const customEndpointProvider = endpoint != null ? async () => (0, toEndpointV1_1.toEndpointV1)(await (0, util_middleware_1.normalizeProvider)(endpoint)()) : undefined; + const isCustomEndpoint = !!endpoint; + return { + ...input, + endpoint: customEndpointProvider, + tls, + isCustomEndpoint, + useDualstackEndpoint: (0, util_middleware_1.normalizeProvider)((_b = input.useDualstackEndpoint) !== null && _b !== void 0 ? _b : false), + useFipsEndpoint: (0, util_middleware_1.normalizeProvider)((_c = input.useFipsEndpoint) !== null && _c !== void 0 ? 
_c : false), + }; +}; +exports.resolveEndpointConfig = resolveEndpointConfig; /***/ }), -/***/ 51585: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 13105: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['transfer'] = {}; -AWS.Transfer = Service.defineService('transfer', ['2018-11-05']); -Object.defineProperty(apiLoader.services['transfer'], '2018-11-05', { - get: function get() { - var model = __nccwpck_require__(93419); - model.paginators = (__nccwpck_require__(65803)/* .pagination */ .o); - model.waiters = (__nccwpck_require__(45405)/* .waiters */ .V); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Transfer; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(19194), exports); /***/ }), -/***/ 72544: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 19194: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['translate'] = {}; -AWS.Translate = Service.defineService('translate', ['2017-07-01']); -Object.defineProperty(apiLoader.services['translate'], '2017-07-01', { - get: function get() { - var model = __nccwpck_require__(61084); - model.paginators = (__nccwpck_require__(40304)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Translate; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isArnBucketName = exports.isDnsCompatibleBucketName = exports.S3_HOSTNAME_PATTERN = exports.DOT_PATTERN = exports.resolveParamsForS3 = void 0; +const resolveParamsForS3 = async (endpointParams) => { + const bucket = (endpointParams === null || endpointParams === void 0 ? 
void 0 : endpointParams.Bucket) || ""; + if (typeof endpointParams.Bucket === "string") { + endpointParams.Bucket = bucket.replace(/#/g, encodeURIComponent("#")).replace(/\?/g, encodeURIComponent("?")); + } + if ((0, exports.isArnBucketName)(bucket)) { + if (endpointParams.ForcePathStyle === true) { + throw new Error("Path-style addressing cannot be used with ARN buckets"); + } + } + else if (!(0, exports.isDnsCompatibleBucketName)(bucket) || + (bucket.indexOf(".") !== -1 && !String(endpointParams.Endpoint).startsWith("http:")) || + bucket.toLowerCase() !== bucket || + bucket.length < 3) { + endpointParams.ForcePathStyle = true; + } + if (endpointParams.DisableMultiRegionAccessPoints) { + endpointParams.disableMultiRegionAccessPoints = true; + endpointParams.DisableMRAP = true; + } + return endpointParams; +}; +exports.resolveParamsForS3 = resolveParamsForS3; +const DOMAIN_PATTERN = /^[a-z0-9][a-z0-9\.\-]{1,61}[a-z0-9]$/; +const IP_ADDRESS_PATTERN = /(\d+\.){3}\d+/; +const DOTS_PATTERN = /\.\./; +exports.DOT_PATTERN = /\./; +exports.S3_HOSTNAME_PATTERN = /^(.+\.)?s3(-fips)?(\.dualstack)?[.-]([a-z0-9-]+)\./; +const isDnsCompatibleBucketName = (bucketName) => DOMAIN_PATTERN.test(bucketName) && !IP_ADDRESS_PATTERN.test(bucketName) && !DOTS_PATTERN.test(bucketName); +exports.isDnsCompatibleBucketName = isDnsCompatibleBucketName; +const isArnBucketName = (bucketName) => { + const [arn, partition, service, region, account, typeOrId] = bucketName.split(":"); + const isArn = arn === "arn" && bucketName.split(":").length >= 6; + const isValidArn = [arn, partition, service, account, typeOrId].filter(Boolean).length === 5; + if (isArn && !isValidArn) { + throw new Error(`Invalid ARN: ${bucketName} was an invalid ARN.`); + } + return arn === "arn" && !!partition && !!service && !!account && !!typeOrId; +}; +exports.isArnBucketName = isArnBucketName; /***/ }), -/***/ 28747: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 39720: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['voiceid'] = {}; -AWS.VoiceID = Service.defineService('voiceid', ['2021-09-27']); -Object.defineProperty(apiLoader.services['voiceid'], '2021-09-27', { - get: function get() { - var model = __nccwpck_require__(9375); - model.paginators = (__nccwpck_require__(59512)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.VoiceID; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 78952: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 80155: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['vpclattice'] = {}; -AWS.VPCLattice = Service.defineService('vpclattice', ['2022-11-30']); -Object.defineProperty(apiLoader.services['vpclattice'], '2022-11-30', { - get: function get() { - var model = __nccwpck_require__(49656); - model.paginators = (__nccwpck_require__(98717)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.VPCLattice; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AdaptiveRetryStrategy = void 0; +const util_retry_1 = __nccwpck_require__(84902); +const 
StandardRetryStrategy_1 = __nccwpck_require__(94582); +class AdaptiveRetryStrategy extends StandardRetryStrategy_1.StandardRetryStrategy { + constructor(maxAttemptsProvider, options) { + const { rateLimiter, ...superOptions } = options !== null && options !== void 0 ? options : {}; + super(maxAttemptsProvider, superOptions); + this.rateLimiter = rateLimiter !== null && rateLimiter !== void 0 ? rateLimiter : new util_retry_1.DefaultRateLimiter(); + this.mode = util_retry_1.RETRY_MODES.ADAPTIVE; + } + async retry(next, args) { + return super.retry(next, args, { + beforeRequest: async () => { + return this.rateLimiter.getSendToken(); + }, + afterRequest: (response) => { + this.rateLimiter.updateClientSendingRate(response); + }, + }); + } +} +exports.AdaptiveRetryStrategy = AdaptiveRetryStrategy; /***/ }), -/***/ 72742: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 94582: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['waf'] = {}; -AWS.WAF = Service.defineService('waf', ['2015-08-24']); -Object.defineProperty(apiLoader.services['waf'], '2015-08-24', { - get: function get() { - var model = __nccwpck_require__(37925); - model.paginators = (__nccwpck_require__(65794)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.WAF; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.StandardRetryStrategy = void 0; +const protocol_http_1 = __nccwpck_require__(64418); +const service_error_classification_1 = __nccwpck_require__(6375); +const util_retry_1 = __nccwpck_require__(84902); +const uuid_1 = __nccwpck_require__(75840); +const defaultRetryQuota_1 = __nccwpck_require__(29991); +const delayDecider_1 = __nccwpck_require__(9465); +const retryDecider_1 = __nccwpck_require__(67653); +const util_1 = __nccwpck_require__(42827); +class StandardRetryStrategy { + constructor(maxAttemptsProvider, options) { + var _a, _b, _c; + this.maxAttemptsProvider = maxAttemptsProvider; + this.mode = util_retry_1.RETRY_MODES.STANDARD; + this.retryDecider = (_a = options === null || options === void 0 ? void 0 : options.retryDecider) !== null && _a !== void 0 ? _a : retryDecider_1.defaultRetryDecider; + this.delayDecider = (_b = options === null || options === void 0 ? void 0 : options.delayDecider) !== null && _b !== void 0 ? _b : delayDecider_1.defaultDelayDecider; + this.retryQuota = (_c = options === null || options === void 0 ? void 0 : options.retryQuota) !== null && _c !== void 0 ? 
_c : (0, defaultRetryQuota_1.getDefaultRetryQuota)(util_retry_1.INITIAL_RETRY_TOKENS); + } + shouldRetry(error, attempts, maxAttempts) { + return attempts < maxAttempts && this.retryDecider(error) && this.retryQuota.hasRetryTokens(error); + } + async getMaxAttempts() { + let maxAttempts; + try { + maxAttempts = await this.maxAttemptsProvider(); + } + catch (error) { + maxAttempts = util_retry_1.DEFAULT_MAX_ATTEMPTS; + } + return maxAttempts; + } + async retry(next, args, options) { + let retryTokenAmount; + let attempts = 0; + let totalDelay = 0; + const maxAttempts = await this.getMaxAttempts(); + const { request } = args; + if (protocol_http_1.HttpRequest.isInstance(request)) { + request.headers[util_retry_1.INVOCATION_ID_HEADER] = (0, uuid_1.v4)(); + } + while (true) { + try { + if (protocol_http_1.HttpRequest.isInstance(request)) { + request.headers[util_retry_1.REQUEST_HEADER] = `attempt=${attempts + 1}; max=${maxAttempts}`; + } + if (options === null || options === void 0 ? void 0 : options.beforeRequest) { + await options.beforeRequest(); + } + const { response, output } = await next(args); + if (options === null || options === void 0 ? void 0 : options.afterRequest) { + options.afterRequest(response); + } + this.retryQuota.releaseRetryTokens(retryTokenAmount); + output.$metadata.attempts = attempts + 1; + output.$metadata.totalRetryDelay = totalDelay; + return { response, output }; + } + catch (e) { + const err = (0, util_1.asSdkError)(e); + attempts++; + if (this.shouldRetry(err, attempts, maxAttempts)) { + retryTokenAmount = this.retryQuota.retrieveRetryTokens(err); + const delayFromDecider = this.delayDecider((0, service_error_classification_1.isThrottlingError)(err) ? util_retry_1.THROTTLING_RETRY_DELAY_BASE : util_retry_1.DEFAULT_RETRY_DELAY_BASE, attempts); + const delayFromResponse = getDelayFromRetryAfterHeader(err.$response); + const delay = Math.max(delayFromResponse || 0, delayFromDecider); + totalDelay += delay; + await new Promise((resolve) => setTimeout(resolve, delay)); + continue; + } + if (!err.$metadata) { + err.$metadata = {}; + } + err.$metadata.attempts = attempts; + err.$metadata.totalRetryDelay = totalDelay; + throw err; + } + } + } +} +exports.StandardRetryStrategy = StandardRetryStrategy; +const getDelayFromRetryAfterHeader = (response) => { + if (!protocol_http_1.HttpResponse.isInstance(response)) + return; + const retryAfterHeaderName = Object.keys(response.headers).find((key) => key.toLowerCase() === "retry-after"); + if (!retryAfterHeaderName) + return; + const retryAfter = response.headers[retryAfterHeaderName]; + const retryAfterSeconds = Number(retryAfter); + if (!Number.isNaN(retryAfterSeconds)) + return retryAfterSeconds * 1000; + const retryAfterDate = new Date(retryAfter); + return retryAfterDate.getTime() - Date.now(); +}; /***/ }), -/***/ 23153: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 58709: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['wafregional'] = {}; -AWS.WAFRegional = Service.defineService('wafregional', ['2016-11-28']); -Object.defineProperty(apiLoader.services['wafregional'], '2016-11-28', { - get: function get() { - var model = __nccwpck_require__(20014); - model.paginators = (__nccwpck_require__(66829)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = 
AWS.WAFRegional; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.NODE_RETRY_MODE_CONFIG_OPTIONS = exports.CONFIG_RETRY_MODE = exports.ENV_RETRY_MODE = exports.resolveRetryConfig = exports.NODE_MAX_ATTEMPT_CONFIG_OPTIONS = exports.CONFIG_MAX_ATTEMPTS = exports.ENV_MAX_ATTEMPTS = void 0; +const util_middleware_1 = __nccwpck_require__(2390); +const util_retry_1 = __nccwpck_require__(84902); +exports.ENV_MAX_ATTEMPTS = "AWS_MAX_ATTEMPTS"; +exports.CONFIG_MAX_ATTEMPTS = "max_attempts"; +exports.NODE_MAX_ATTEMPT_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => { + const value = env[exports.ENV_MAX_ATTEMPTS]; + if (!value) + return undefined; + const maxAttempt = parseInt(value); + if (Number.isNaN(maxAttempt)) { + throw new Error(`Environment variable ${exports.ENV_MAX_ATTEMPTS} mast be a number, got "${value}"`); + } + return maxAttempt; + }, + configFileSelector: (profile) => { + const value = profile[exports.CONFIG_MAX_ATTEMPTS]; + if (!value) + return undefined; + const maxAttempt = parseInt(value); + if (Number.isNaN(maxAttempt)) { + throw new Error(`Shared config file entry ${exports.CONFIG_MAX_ATTEMPTS} mast be a number, got "${value}"`); + } + return maxAttempt; + }, + default: util_retry_1.DEFAULT_MAX_ATTEMPTS, +}; +const resolveRetryConfig = (input) => { + var _a; + const { retryStrategy } = input; + const maxAttempts = (0, util_middleware_1.normalizeProvider)((_a = input.maxAttempts) !== null && _a !== void 0 ? _a : util_retry_1.DEFAULT_MAX_ATTEMPTS); + return { + ...input, + maxAttempts, + retryStrategy: async () => { + if (retryStrategy) { + return retryStrategy; + } + const retryMode = await (0, util_middleware_1.normalizeProvider)(input.retryMode)(); + if (retryMode === util_retry_1.RETRY_MODES.ADAPTIVE) { + return new util_retry_1.AdaptiveRetryStrategy(maxAttempts); + } + return new util_retry_1.StandardRetryStrategy(maxAttempts); + }, + }; +}; +exports.resolveRetryConfig = resolveRetryConfig; +exports.ENV_RETRY_MODE = "AWS_RETRY_MODE"; +exports.CONFIG_RETRY_MODE = "retry_mode"; +exports.NODE_RETRY_MODE_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => env[exports.ENV_RETRY_MODE], + configFileSelector: (profile) => profile[exports.CONFIG_RETRY_MODE], + default: util_retry_1.DEFAULT_RETRY_MODE, +}; /***/ }), -/***/ 50353: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 29991: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['wafv2'] = {}; -AWS.WAFV2 = Service.defineService('wafv2', ['2019-07-29']); -Object.defineProperty(apiLoader.services['wafv2'], '2019-07-29', { - get: function get() { - var model = __nccwpck_require__(51872); - model.paginators = (__nccwpck_require__(33900)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.WAFV2; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getDefaultRetryQuota = void 0; +const util_retry_1 = __nccwpck_require__(84902); +const getDefaultRetryQuota = (initialRetryTokens, options) => { + var _a, _b, _c; + const MAX_CAPACITY = initialRetryTokens; + const noRetryIncrement = (_a = options === null || options === void 0 ? void 0 : options.noRetryIncrement) !== null && _a !== void 0 ? _a : util_retry_1.NO_RETRY_INCREMENT; + const retryCost = (_b = options === null || options === void 0 ? 
void 0 : options.retryCost) !== null && _b !== void 0 ? _b : util_retry_1.RETRY_COST; + const timeoutRetryCost = (_c = options === null || options === void 0 ? void 0 : options.timeoutRetryCost) !== null && _c !== void 0 ? _c : util_retry_1.TIMEOUT_RETRY_COST; + let availableCapacity = initialRetryTokens; + const getCapacityAmount = (error) => (error.name === "TimeoutError" ? timeoutRetryCost : retryCost); + const hasRetryTokens = (error) => getCapacityAmount(error) <= availableCapacity; + const retrieveRetryTokens = (error) => { + if (!hasRetryTokens(error)) { + throw new Error("No retry token available"); + } + const capacityAmount = getCapacityAmount(error); + availableCapacity -= capacityAmount; + return capacityAmount; + }; + const releaseRetryTokens = (capacityReleaseAmount) => { + availableCapacity += capacityReleaseAmount !== null && capacityReleaseAmount !== void 0 ? capacityReleaseAmount : noRetryIncrement; + availableCapacity = Math.min(availableCapacity, MAX_CAPACITY); + }; + return Object.freeze({ + hasRetryTokens, + retrieveRetryTokens, + releaseRetryTokens, + }); +}; +exports.getDefaultRetryQuota = getDefaultRetryQuota; /***/ }), -/***/ 86263: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 9465: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['wellarchitected'] = {}; -AWS.WellArchitected = Service.defineService('wellarchitected', ['2020-03-31']); -Object.defineProperty(apiLoader.services['wellarchitected'], '2020-03-31', { - get: function get() { - var model = __nccwpck_require__(19249); - model.paginators = (__nccwpck_require__(54693)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.WellArchitected; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.defaultDelayDecider = void 0; +const util_retry_1 = __nccwpck_require__(84902); +const defaultDelayDecider = (delayBase, attempts) => Math.floor(Math.min(util_retry_1.MAXIMUM_RETRY_DELAY, Math.random() * 2 ** attempts * delayBase)); +exports.defaultDelayDecider = defaultDelayDecider; /***/ }), -/***/ 85266: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 96039: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['wisdom'] = {}; -AWS.Wisdom = Service.defineService('wisdom', ['2020-10-19']); -Object.defineProperty(apiLoader.services['wisdom'], '2020-10-19', { - get: function get() { - var model = __nccwpck_require__(94385); - model.paginators = (__nccwpck_require__(54852)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.Wisdom; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(80155), exports); +tslib_1.__exportStar(__nccwpck_require__(94582), exports); +tslib_1.__exportStar(__nccwpck_require__(58709), exports); +tslib_1.__exportStar(__nccwpck_require__(9465), exports); +tslib_1.__exportStar(__nccwpck_require__(76556), exports); +tslib_1.__exportStar(__nccwpck_require__(67653), exports); +tslib_1.__exportStar(__nccwpck_require__(81434), exports); /***/ }), -/***/ 
38835: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 76556: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['workdocs'] = {}; -AWS.WorkDocs = Service.defineService('workdocs', ['2016-05-01']); -Object.defineProperty(apiLoader.services['workdocs'], '2016-05-01', { - get: function get() { - var model = __nccwpck_require__(41052); - model.paginators = (__nccwpck_require__(94768)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.WorkDocs; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getOmitRetryHeadersPlugin = exports.omitRetryHeadersMiddlewareOptions = exports.omitRetryHeadersMiddleware = void 0; +const protocol_http_1 = __nccwpck_require__(64418); +const util_retry_1 = __nccwpck_require__(84902); +const omitRetryHeadersMiddleware = () => (next) => async (args) => { + const { request } = args; + if (protocol_http_1.HttpRequest.isInstance(request)) { + delete request.headers[util_retry_1.INVOCATION_ID_HEADER]; + delete request.headers[util_retry_1.REQUEST_HEADER]; + } + return next(args); +}; +exports.omitRetryHeadersMiddleware = omitRetryHeadersMiddleware; +exports.omitRetryHeadersMiddlewareOptions = { + name: "omitRetryHeadersMiddleware", + tags: ["RETRY", "HEADERS", "OMIT_RETRY_HEADERS"], + relation: "before", + toMiddleware: "awsAuthMiddleware", + override: true, +}; +const getOmitRetryHeadersPlugin = (options) => ({ + applyToStack: (clientStack) => { + clientStack.addRelativeTo((0, exports.omitRetryHeadersMiddleware)(), exports.omitRetryHeadersMiddlewareOptions); + }, +}); +exports.getOmitRetryHeadersPlugin = getOmitRetryHeadersPlugin; /***/ }), -/***/ 48579: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 67653: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['worklink'] = {}; -AWS.WorkLink = Service.defineService('worklink', ['2018-09-25']); -Object.defineProperty(apiLoader.services['worklink'], '2018-09-25', { - get: function get() { - var model = __nccwpck_require__(37178); - model.paginators = (__nccwpck_require__(74073)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.WorkLink; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.defaultRetryDecider = void 0; +const service_error_classification_1 = __nccwpck_require__(6375); +const defaultRetryDecider = (error) => { + if (!error) { + return false; + } + return (0, service_error_classification_1.isRetryableByTrait)(error) || (0, service_error_classification_1.isClockSkewError)(error) || (0, service_error_classification_1.isThrottlingError)(error) || (0, service_error_classification_1.isTransientError)(error); +}; +exports.defaultRetryDecider = defaultRetryDecider; /***/ }), -/***/ 38374: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 81434: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['workmail'] = {}; -AWS.WorkMail = 
Service.defineService('workmail', ['2017-10-01']); -Object.defineProperty(apiLoader.services['workmail'], '2017-10-01', { - get: function get() { - var model = __nccwpck_require__(93150); - model.paginators = (__nccwpck_require__(5158)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.WorkMail; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getRetryAfterHint = exports.getRetryPlugin = exports.retryMiddlewareOptions = exports.retryMiddleware = void 0; +const protocol_http_1 = __nccwpck_require__(64418); +const service_error_classification_1 = __nccwpck_require__(6375); +const util_retry_1 = __nccwpck_require__(84902); +const uuid_1 = __nccwpck_require__(75840); +const util_1 = __nccwpck_require__(42827); +const retryMiddleware = (options) => (next, context) => async (args) => { + let retryStrategy = await options.retryStrategy(); + const maxAttempts = await options.maxAttempts(); + if (isRetryStrategyV2(retryStrategy)) { + retryStrategy = retryStrategy; + let retryToken = await retryStrategy.acquireInitialRetryToken(context["partition_id"]); + let lastError = new Error(); + let attempts = 0; + let totalRetryDelay = 0; + const { request } = args; + if (protocol_http_1.HttpRequest.isInstance(request)) { + request.headers[util_retry_1.INVOCATION_ID_HEADER] = (0, uuid_1.v4)(); + } + while (true) { + try { + if (protocol_http_1.HttpRequest.isInstance(request)) { + request.headers[util_retry_1.REQUEST_HEADER] = `attempt=${attempts + 1}; max=${maxAttempts}`; + } + const { response, output } = await next(args); + retryStrategy.recordSuccess(retryToken); + output.$metadata.attempts = attempts + 1; + output.$metadata.totalRetryDelay = totalRetryDelay; + return { response, output }; + } + catch (e) { + const retryErrorInfo = getRetryErrorInfo(e); + lastError = (0, util_1.asSdkError)(e); + try { + retryToken = await retryStrategy.refreshRetryTokenForRetry(retryToken, retryErrorInfo); + } + catch (refreshError) { + if (!lastError.$metadata) { + lastError.$metadata = {}; + } + lastError.$metadata.attempts = attempts + 1; + lastError.$metadata.totalRetryDelay = totalRetryDelay; + throw lastError; + } + attempts = retryToken.getRetryCount(); + const delay = retryToken.getRetryDelay(); + totalRetryDelay += delay; + await new Promise((resolve) => setTimeout(resolve, delay)); + } + } + } + else { + retryStrategy = retryStrategy; + if (retryStrategy === null || retryStrategy === void 0 ? 
void 0 : retryStrategy.mode) + context.userAgent = [...(context.userAgent || []), ["cfg/retry-mode", retryStrategy.mode]]; + return retryStrategy.retry(next, args); + } +}; +exports.retryMiddleware = retryMiddleware; +const isRetryStrategyV2 = (retryStrategy) => typeof retryStrategy.acquireInitialRetryToken !== "undefined" && + typeof retryStrategy.refreshRetryTokenForRetry !== "undefined" && + typeof retryStrategy.recordSuccess !== "undefined"; +const getRetryErrorInfo = (error) => { + const errorInfo = { + errorType: getRetryErrorType(error), + }; + const retryAfterHint = (0, exports.getRetryAfterHint)(error.$response); + if (retryAfterHint) { + errorInfo.retryAfterHint = retryAfterHint; + } + return errorInfo; +}; +const getRetryErrorType = (error) => { + if ((0, service_error_classification_1.isThrottlingError)(error)) + return "THROTTLING"; + if ((0, service_error_classification_1.isTransientError)(error)) + return "TRANSIENT"; + if ((0, service_error_classification_1.isServerError)(error)) + return "SERVER_ERROR"; + return "CLIENT_ERROR"; +}; +exports.retryMiddlewareOptions = { + name: "retryMiddleware", + tags: ["RETRY"], + step: "finalizeRequest", + priority: "high", + override: true, +}; +const getRetryPlugin = (options) => ({ + applyToStack: (clientStack) => { + clientStack.add((0, exports.retryMiddleware)(options), exports.retryMiddlewareOptions); + }, +}); +exports.getRetryPlugin = getRetryPlugin; +const getRetryAfterHint = (response) => { + if (!protocol_http_1.HttpResponse.isInstance(response)) + return; + const retryAfterHeaderName = Object.keys(response.headers).find((key) => key.toLowerCase() === "retry-after"); + if (!retryAfterHeaderName) + return; + const retryAfter = response.headers[retryAfterHeaderName]; + const retryAfterSeconds = Number(retryAfter); + if (!Number.isNaN(retryAfterSeconds)) + return new Date(retryAfterSeconds * 1000); + const retryAfterDate = new Date(retryAfter); + return retryAfterDate; +}; +exports.getRetryAfterHint = getRetryAfterHint; /***/ }), -/***/ 67025: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 42827: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['workmailmessageflow'] = {}; -AWS.WorkMailMessageFlow = Service.defineService('workmailmessageflow', ['2019-05-01']); -Object.defineProperty(apiLoader.services['workmailmessageflow'], '2019-05-01', { - get: function get() { - var model = __nccwpck_require__(57733); - model.paginators = (__nccwpck_require__(85646)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.WorkMailMessageFlow; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.asSdkError = void 0; +const asSdkError = (error) => { + if (error instanceof Error) + return error; + if (error instanceof Object) + return Object.assign(new Error(), error); + if (typeof error === "string") + return new Error(error); + return new Error(`AWS SDK error wrapper for ${error}`); +}; +exports.asSdkError = asSdkError; /***/ }), -/***/ 25513: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 21595: +/***/ ((__unused_webpack_module, exports) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['workspaces'] = {}; -AWS.WorkSpaces = 
Service.defineService('workspaces', ['2015-04-08']); -Object.defineProperty(apiLoader.services['workspaces'], '2015-04-08', { - get: function get() { - var model = __nccwpck_require__(97805); - model.paginators = (__nccwpck_require__(27769)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.WorkSpaces; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.deserializerMiddleware = void 0; +const deserializerMiddleware = (options, deserializer) => (next, context) => async (args) => { + const { response } = await next(args); + try { + const parsed = await deserializer(response, options); + return { + response, + output: parsed, + }; + } + catch (error) { + Object.defineProperty(error, "$response", { + value: response, + }); + if (!("$metadata" in error)) { + const hint = `Deserialization error: to see the raw response, inspect the hidden field {error}.$response on this object.`; + error.message += "\n " + hint; + } + throw error; + } +}; +exports.deserializerMiddleware = deserializerMiddleware; /***/ }), -/***/ 94124: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 81238: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['workspacesweb'] = {}; -AWS.WorkSpacesWeb = Service.defineService('workspacesweb', ['2020-07-08']); -Object.defineProperty(apiLoader.services['workspacesweb'], '2020-07-08', { - get: function get() { - var model = __nccwpck_require__(47128); - model.paginators = (__nccwpck_require__(43497)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.WorkSpacesWeb; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(21595), exports); +tslib_1.__exportStar(__nccwpck_require__(72338), exports); +tslib_1.__exportStar(__nccwpck_require__(23566), exports); /***/ }), -/***/ 41548: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 72338: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); -var Service = AWS.Service; -var apiLoader = AWS.apiLoader; - -apiLoader.services['xray'] = {}; -AWS.XRay = Service.defineService('xray', ['2016-04-12']); -Object.defineProperty(apiLoader.services['xray'], '2016-04-12', { - get: function get() { - var model = __nccwpck_require__(97355); - model.paginators = (__nccwpck_require__(97949)/* .pagination */ .o); - return model; - }, - enumerable: true, - configurable: true -}); +"use strict"; -module.exports = AWS.XRay; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getSerdePlugin = exports.serializerMiddlewareOption = exports.deserializerMiddlewareOption = void 0; +const deserializerMiddleware_1 = __nccwpck_require__(21595); +const serializerMiddleware_1 = __nccwpck_require__(23566); +exports.deserializerMiddlewareOption = { + name: "deserializerMiddleware", + step: "deserialize", + tags: ["DESERIALIZER"], + override: true, +}; +exports.serializerMiddlewareOption = { + name: "serializerMiddleware", + step: "serialize", + tags: ["SERIALIZER"], + override: true, +}; +function getSerdePlugin(config, serializer, deserializer) { + return { + applyToStack: (commandStack) => 
{ + commandStack.add((0, deserializerMiddleware_1.deserializerMiddleware)(config, deserializer), exports.deserializerMiddlewareOption); + commandStack.add((0, serializerMiddleware_1.serializerMiddleware)(config, serializer), exports.serializerMiddlewareOption); + }, + }; +} +exports.getSerdePlugin = getSerdePlugin; /***/ }), -/***/ 52793: -/***/ ((module) => { - -function apiLoader(svc, version) { - if (!apiLoader.services.hasOwnProperty(svc)) { - throw new Error('InvalidService: Failed to load api for ' + svc); - } - return apiLoader.services[svc][version]; -} +/***/ 23566: +/***/ ((__unused_webpack_module, exports) => { -/** - * @api private - * - * This member of AWS.apiLoader is private, but changing it will necessitate a - * change to ../scripts/services-table-generator.ts - */ -apiLoader.services = {}; +"use strict"; -/** - * @api private - */ -module.exports = apiLoader; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.serializerMiddleware = void 0; +const serializerMiddleware = (options, serializer) => (next, context) => async (args) => { + var _a; + const endpoint = ((_a = context.endpointV2) === null || _a === void 0 ? void 0 : _a.url) && options.urlParser + ? async () => options.urlParser(context.endpointV2.url) + : options.endpoint; + if (!endpoint) { + throw new Error("No valid endpoint provider available."); + } + const request = await serializer(args.input, { ...options, endpoint }); + return next({ + ...args, + request, + }); +}; +exports.serializerMiddleware = serializerMiddleware; /***/ }), -/***/ 71786: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 2404: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.constructStack = void 0; +const constructStack = () => { + let absoluteEntries = []; + let relativeEntries = []; + const entriesNameSet = new Set(); + const sort = (entries) => entries.sort((a, b) => stepWeights[b.step] - stepWeights[a.step] || + priorityWeights[b.priority || "normal"] - priorityWeights[a.priority || "normal"]); + const removeByName = (toRemove) => { + let isRemoved = false; + const filterCb = (entry) => { + if (entry.name && entry.name === toRemove) { + isRemoved = true; + entriesNameSet.delete(toRemove); + return false; + } + return true; + }; + absoluteEntries = absoluteEntries.filter(filterCb); + relativeEntries = relativeEntries.filter(filterCb); + return isRemoved; + }; + const removeByReference = (toRemove) => { + let isRemoved = false; + const filterCb = (entry) => { + if (entry.middleware === toRemove) { + isRemoved = true; + if (entry.name) + entriesNameSet.delete(entry.name); + return false; + } + return true; + }; + absoluteEntries = absoluteEntries.filter(filterCb); + relativeEntries = relativeEntries.filter(filterCb); + return isRemoved; + }; + const cloneTo = (toStack) => { + absoluteEntries.forEach((entry) => { + toStack.add(entry.middleware, { ...entry }); + }); + relativeEntries.forEach((entry) => { + toStack.addRelativeTo(entry.middleware, { ...entry }); + }); + return toStack; + }; + const expandRelativeMiddlewareList = (from) => { + const expandedMiddlewareList = []; + from.before.forEach((entry) => { + if (entry.before.length === 0 && entry.after.length === 0) { + expandedMiddlewareList.push(entry); + } + else { + expandedMiddlewareList.push(...expandRelativeMiddlewareList(entry)); + } + }); + expandedMiddlewareList.push(from); + from.after.reverse().forEach((entry) => { + if 
(entry.before.length === 0 && entry.after.length === 0) { + expandedMiddlewareList.push(entry); + } + else { + expandedMiddlewareList.push(...expandRelativeMiddlewareList(entry)); + } + }); + return expandedMiddlewareList; + }; + const getMiddlewareList = (debug = false) => { + const normalizedAbsoluteEntries = []; + const normalizedRelativeEntries = []; + const normalizedEntriesNameMap = {}; + absoluteEntries.forEach((entry) => { + const normalizedEntry = { + ...entry, + before: [], + after: [], + }; + if (normalizedEntry.name) + normalizedEntriesNameMap[normalizedEntry.name] = normalizedEntry; + normalizedAbsoluteEntries.push(normalizedEntry); + }); + relativeEntries.forEach((entry) => { + const normalizedEntry = { + ...entry, + before: [], + after: [], + }; + if (normalizedEntry.name) + normalizedEntriesNameMap[normalizedEntry.name] = normalizedEntry; + normalizedRelativeEntries.push(normalizedEntry); + }); + normalizedRelativeEntries.forEach((entry) => { + if (entry.toMiddleware) { + const toMiddleware = normalizedEntriesNameMap[entry.toMiddleware]; + if (toMiddleware === undefined) { + if (debug) { + return; + } + throw new Error(`${entry.toMiddleware} is not found when adding ${entry.name || "anonymous"} middleware ${entry.relation} ${entry.toMiddleware}`); + } + if (entry.relation === "after") { + toMiddleware.after.push(entry); + } + if (entry.relation === "before") { + toMiddleware.before.push(entry); + } + } + }); + const mainChain = sort(normalizedAbsoluteEntries) + .map(expandRelativeMiddlewareList) + .reduce((wholeList, expandedMiddlewareList) => { + wholeList.push(...expandedMiddlewareList); + return wholeList; + }, []); + return mainChain; + }; + const stack = { + add: (middleware, options = {}) => { + const { name, override } = options; + const entry = { + step: "initialize", + priority: "normal", + middleware, + ...options, + }; + if (name) { + if (entriesNameSet.has(name)) { + if (!override) + throw new Error(`Duplicate middleware name '${name}'`); + const toOverrideIndex = absoluteEntries.findIndex((entry) => entry.name === name); + const toOverride = absoluteEntries[toOverrideIndex]; + if (toOverride.step !== entry.step || toOverride.priority !== entry.priority) { + throw new Error(`"${name}" middleware with ${toOverride.priority} priority in ${toOverride.step} step cannot be ` + + `overridden by same-name middleware with ${entry.priority} priority in ${entry.step} step.`); + } + absoluteEntries.splice(toOverrideIndex, 1); + } + entriesNameSet.add(name); + } + absoluteEntries.push(entry); + }, + addRelativeTo: (middleware, options) => { + const { name, override } = options; + const entry = { + middleware, + ...options, + }; + if (name) { + if (entriesNameSet.has(name)) { + if (!override) + throw new Error(`Duplicate middleware name '${name}'`); + const toOverrideIndex = relativeEntries.findIndex((entry) => entry.name === name); + const toOverride = relativeEntries[toOverrideIndex]; + if (toOverride.toMiddleware !== entry.toMiddleware || toOverride.relation !== entry.relation) { + throw new Error(`"${name}" middleware ${toOverride.relation} "${toOverride.toMiddleware}" middleware cannot be overridden ` + + `by same-name middleware ${entry.relation} "${entry.toMiddleware}" middleware.`); + } + relativeEntries.splice(toOverrideIndex, 1); + } + entriesNameSet.add(name); + } + relativeEntries.push(entry); + }, + clone: () => cloneTo((0, exports.constructStack)()), + use: (plugin) => { + plugin.applyToStack(stack); + }, + remove: (toRemove) => { + if (typeof toRemove === 
"string") + return removeByName(toRemove); + else + return removeByReference(toRemove); + }, + removeByTag: (toRemove) => { + let isRemoved = false; + const filterCb = (entry) => { + const { tags, name } = entry; + if (tags && tags.includes(toRemove)) { + if (name) + entriesNameSet.delete(name); + isRemoved = true; + return false; + } + return true; + }; + absoluteEntries = absoluteEntries.filter(filterCb); + relativeEntries = relativeEntries.filter(filterCb); + return isRemoved; + }, + concat: (from) => { + const cloned = cloneTo((0, exports.constructStack)()); + cloned.use(from); + return cloned; + }, + applyToStack: cloneTo, + identify: () => { + return getMiddlewareList(true).map((mw) => { + return mw.name + ": " + (mw.tags || []).join(","); + }); + }, + resolve: (handler, context) => { + for (const middleware of getMiddlewareList() + .map((entry) => entry.middleware) + .reverse()) { + handler = middleware(handler, context); + } + return handler; + }, + }; + return stack; +}; +exports.constructStack = constructStack; +const stepWeights = { + initialize: 5, + serialize: 4, + build: 3, + finalizeRequest: 2, + deserialize: 1, +}; +const priorityWeights = { + high: 3, + normal: 2, + low: 1, +}; -__nccwpck_require__(73639); -var AWS = __nccwpck_require__(28437); +/***/ }), -// Load all service classes -__nccwpck_require__(26296); +/***/ 97911: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -/** - * @api private - */ -module.exports = AWS; +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(2404), exports); /***/ }), -/***/ 93260: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 54766: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -var AWS = __nccwpck_require__(28437), - url = AWS.util.url, - crypto = AWS.util.crypto.lib, - base64Encode = AWS.util.base64.encode, - inherit = AWS.util.inherit; - -var queryEncode = function (string) { - var replacements = { - '+': '-', - '=': '_', - '/': '~' - }; - return string.replace(/[\+=\/]/g, function (match) { - return replacements[match]; - }); -}; +"use strict"; -var signPolicy = function (policy, privateKey) { - var sign = crypto.createSign('RSA-SHA1'); - sign.write(policy); - return queryEncode(sign.sign(privateKey, 'base64')); -}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.loadConfig = void 0; +const property_provider_1 = __nccwpck_require__(79721); +const fromEnv_1 = __nccwpck_require__(15606); +const fromSharedConfigFiles_1 = __nccwpck_require__(45784); +const fromStatic_1 = __nccwpck_require__(23091); +const loadConfig = ({ environmentVariableSelector, configFileSelector, default: defaultValue }, configuration = {}) => (0, property_provider_1.memoize)((0, property_provider_1.chain)((0, fromEnv_1.fromEnv)(environmentVariableSelector), (0, fromSharedConfigFiles_1.fromSharedConfigFiles)(configFileSelector, configuration), (0, fromStatic_1.fromStatic)(defaultValue))); +exports.loadConfig = loadConfig; -var signWithCannedPolicy = function (url, expires, keyPairId, privateKey) { - var policy = JSON.stringify({ - Statement: [ - { - Resource: url, - Condition: { DateLessThan: { 'AWS:EpochTime': expires } } - } - ] - }); - return { - Expires: expires, - 'Key-Pair-Id': keyPairId, - Signature: signPolicy(policy.toString(), privateKey) - }; -}; +/***/ }), -var signWithCustomPolicy = function (policy, keyPairId, privateKey) { - policy = 
policy.replace(/\s/mg, ''); +/***/ 15606: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - return { - Policy: queryEncode(base64Encode(policy)), - 'Key-Pair-Id': keyPairId, - Signature: signPolicy(policy, privateKey) - }; -}; +"use strict"; -var determineScheme = function (url) { - var parts = url.split('://'); - if (parts.length < 2) { - throw new Error('Invalid URL.'); +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.fromEnv = void 0; +const property_provider_1 = __nccwpck_require__(79721); +const fromEnv = (envVarSelector) => async () => { + try { + const config = envVarSelector(process.env); + if (config === undefined) { + throw new Error(); + } + return config; + } + catch (e) { + throw new property_provider_1.CredentialsProviderError(e.message || `Cannot load config from environment variables with getter: ${envVarSelector}`); } - - return parts[0].replace('*', ''); }; +exports.fromEnv = fromEnv; -var getRtmpUrl = function (rtmpUrl) { - var parsed = url.parse(rtmpUrl); - return parsed.path.replace(/^\//, '') + (parsed.hash || ''); -}; -var getResource = function (url) { - switch (determineScheme(url)) { - case 'http': - case 'https': - return url; - case 'rtmp': - return getRtmpUrl(url); - default: - throw new Error('Invalid URI scheme. Scheme must be one of' - + ' http, https, or rtmp'); - } -}; +/***/ }), -var handleError = function (err, callback) { - if (!callback || typeof callback !== 'function') { - throw err; - } +/***/ 45784: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - callback(err); -}; +"use strict"; -var handleSuccess = function (result, callback) { - if (!callback || typeof callback !== 'function') { - return result; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.fromSharedConfigFiles = void 0; +const property_provider_1 = __nccwpck_require__(79721); +const shared_ini_file_loader_1 = __nccwpck_require__(43507); +const fromSharedConfigFiles = (configSelector, { preferredFile = "config", ...init } = {}) => async () => { + const profile = (0, shared_ini_file_loader_1.getProfileName)(init); + const { configFile, credentialsFile } = await (0, shared_ini_file_loader_1.loadSharedConfigFiles)(init); + const profileFromCredentials = credentialsFile[profile] || {}; + const profileFromConfig = configFile[profile] || {}; + const mergedProfile = preferredFile === "config" + ? { ...profileFromCredentials, ...profileFromConfig } + : { ...profileFromConfig, ...profileFromCredentials }; + try { + const configValue = configSelector(mergedProfile); + if (configValue === undefined) { + throw new Error(); + } + return configValue; + } + catch (e) { + throw new property_provider_1.CredentialsProviderError(e.message || `Cannot load config for profile ${profile} in SDK configuration files with getter: ${configSelector}`); } - - callback(null, result); }; +exports.fromSharedConfigFiles = fromSharedConfigFiles; -AWS.CloudFront.Signer = inherit({ - /** - * A signer object can be used to generate signed URLs and cookies for granting - * access to content on restricted CloudFront distributions. - * - * @see http://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/PrivateContent.html - * - * @param keyPairId [String] (Required) The ID of the CloudFront key pair - * being used. - * @param privateKey [String] (Required) A private key in RSA format. 
- */ - constructor: function Signer(keyPairId, privateKey) { - if (keyPairId === void 0 || privateKey === void 0) { - throw new Error('A key pair ID and private key are required'); - } - - this.keyPairId = keyPairId; - this.privateKey = privateKey; - }, - - /** - * Create a signed Amazon CloudFront Cookie. - * - * @param options [Object] The options to create a signed cookie. - * @option options url [String] The URL to which the signature will grant - * access. Required unless you pass in a full - * policy. - * @option options expires [Number] A Unix UTC timestamp indicating when the - * signature should expire. Required unless you - * pass in a full policy. - * @option options policy [String] A CloudFront JSON policy. Required unless - * you pass in a url and an expiry time. - * - * @param cb [Function] if a callback is provided, this function will - * pass the hash as the second parameter (after the error parameter) to - * the callback function. - * - * @return [Object] if called synchronously (with no callback), returns the - * signed cookie parameters. - * @return [null] nothing is returned if a callback is provided. - */ - getSignedCookie: function (options, cb) { - var signatureHash = 'policy' in options - ? signWithCustomPolicy(options.policy, this.keyPairId, this.privateKey) - : signWithCannedPolicy(options.url, options.expires, this.keyPairId, this.privateKey); - - var cookieHash = {}; - for (var key in signatureHash) { - if (Object.prototype.hasOwnProperty.call(signatureHash, key)) { - cookieHash['CloudFront-' + key] = signatureHash[key]; - } - } - - return handleSuccess(cookieHash, cb); - }, - - /** - * Create a signed Amazon CloudFront URL. - * - * Keep in mind that URLs meant for use in media/flash players may have - * different requirements for URL formats (e.g. some require that the - * extension be removed, some require the file name to be prefixed - * - mp4:, some require you to add "/cfx/st" into your URL). - * - * @param options [Object] The options to create a signed URL. - * @option options url [String] The URL to which the signature will grant - * access. Any query params included with - * the URL should be encoded. Required. - * @option options expires [Number] A Unix UTC timestamp indicating when the - * signature should expire. Required unless you - * pass in a full policy. - * @option options policy [String] A CloudFront JSON policy. Required unless - * you pass in a url and an expiry time. - * - * @param cb [Function] if a callback is provided, this function will - * pass the URL as the second parameter (after the error parameter) to - * the callback function. - * - * @return [String] if called synchronously (with no callback), returns the - * signed URL. - * @return [null] nothing is returned if a callback is provided. - */ - getSignedUrl: function (options, cb) { - try { - var resource = getResource(options.url); - } catch (err) { - return handleError(err, cb); - } - var parsedUrl = url.parse(options.url, true), - signatureHash = Object.prototype.hasOwnProperty.call(options, 'policy') - ? signWithCustomPolicy(options.policy, this.keyPairId, this.privateKey) - : signWithCannedPolicy(resource, options.expires, this.keyPairId, this.privateKey); - - parsedUrl.search = null; - for (var key in signatureHash) { - if (Object.prototype.hasOwnProperty.call(signatureHash, key)) { - parsedUrl.query[key] = signatureHash[key]; - } - } +/***/ }), - try { - var signedUrl = determineScheme(options.url) === 'rtmp' - ? 
getRtmpUrl(url.format(parsedUrl)) - : url.format(parsedUrl); - } catch (err) { - return handleError(err, cb); - } +/***/ 23091: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - return handleSuccess(signedUrl, cb); - } -}); +"use strict"; -/** - * @api private - */ -module.exports = AWS.CloudFront.Signer; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.fromStatic = void 0; +const property_provider_1 = __nccwpck_require__(79721); +const isFunction = (func) => typeof func === "function"; +const fromStatic = (defaultValue) => isFunction(defaultValue) ? async () => await defaultValue() : (0, property_provider_1.fromStatic)(defaultValue); +exports.fromStatic = fromStatic; /***/ }), -/***/ 38110: -/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { - -var AWS = __nccwpck_require__(28437); -__nccwpck_require__(53819); -__nccwpck_require__(36965); -var PromisesDependency; +/***/ 33461: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -/** - * The main configuration class used by all service objects to set - * the region, credentials, and other options for requests. - * - * By default, credentials and region settings are left unconfigured. - * This should be configured by the application before using any - * AWS service APIs. - * - * In order to set global configuration options, properties should - * be assigned to the global {AWS.config} object. - * - * @see AWS.config - * - * @!group General Configuration Options - * - * @!attribute credentials - * @return [AWS.Credentials] the AWS credentials to sign requests with. - * - * @!attribute region - * @example Set the global region setting to us-west-2 - * AWS.config.update({region: 'us-west-2'}); - * @return [AWS.Credentials] The region to send service requests to. - * @see http://docs.amazonwebservices.com/general/latest/gr/rande.html - * A list of available endpoints for each AWS service - * - * @!attribute maxRetries - * @return [Integer] the maximum amount of retries to perform for a - * service request. By default this value is calculated by the specific - * service object that the request is being made to. - * - * @!attribute maxRedirects - * @return [Integer] the maximum amount of redirects to follow for a - * service request. Defaults to 10. - * - * @!attribute paramValidation - * @return [Boolean|map] whether input parameters should be validated against - * the operation description before sending the request. Defaults to true. - * Pass a map to enable any of the following specific validation features: - * - * * **min** [Boolean] — Validates that a value meets the min - * constraint. This is enabled by default when paramValidation is set - * to `true`. - * * **max** [Boolean] — Validates that a value meets the max - * constraint. - * * **pattern** [Boolean] — Validates that a string value matches a - * regular expression. - * * **enum** [Boolean] — Validates that a string value matches one - * of the allowable enum values. - * - * @!attribute computeChecksums - * @return [Boolean] whether to compute checksums for payload bodies when - * the service accepts it (currently supported in S3 and SQS only). - * - * @!attribute convertResponseTypes - * @return [Boolean] whether types are converted when parsing response data. - * Currently only supported for JSON based services. Turning this off may - * improve performance on large response payloads. Defaults to `true`. 
- * - * @!attribute correctClockSkew - * @return [Boolean] whether to apply a clock skew correction and retry - * requests that fail because of an skewed client clock. Defaults to - * `false`. - * - * @!attribute sslEnabled - * @return [Boolean] whether SSL is enabled for requests - * - * @!attribute s3ForcePathStyle - * @return [Boolean] whether to force path style URLs for S3 objects - * - * @!attribute s3BucketEndpoint - * @note Setting this configuration option requires an `endpoint` to be - * provided explicitly to the service constructor. - * @return [Boolean] whether the provided endpoint addresses an individual - * bucket (false if it addresses the root API endpoint). - * - * @!attribute s3DisableBodySigning - * @return [Boolean] whether to disable S3 body signing when using signature version `v4`. - * Body signing can only be disabled when using https. Defaults to `true`. - * - * @!attribute s3UsEast1RegionalEndpoint - * @return ['legacy'|'regional'] when region is set to 'us-east-1', whether to send s3 - * request to global endpoints or 'us-east-1' regional endpoints. This config is only - * applicable to S3 client; - * Defaults to 'legacy' - * @!attribute s3UseArnRegion - * @return [Boolean] whether to override the request region with the region inferred - * from requested resource's ARN. Only available for S3 buckets - * Defaults to `true` - * - * @!attribute useAccelerateEndpoint - * @note This configuration option is only compatible with S3 while accessing - * dns-compatible buckets. - * @return [Boolean] Whether to use the Accelerate endpoint with the S3 service. - * Defaults to `false`. - * - * @!attribute retryDelayOptions - * @example Set the base retry delay for all services to 300 ms - * AWS.config.update({retryDelayOptions: {base: 300}}); - * // Delays with maxRetries = 3: 300, 600, 1200 - * @example Set a custom backoff function to provide delay values on retries - * AWS.config.update({retryDelayOptions: {customBackoff: function(retryCount, err) { - * // returns delay in ms - * }}}); - * @return [map] A set of options to configure the retry delay on retryable errors. - * Currently supported options are: - * - * * **base** [Integer] — The base number of milliseconds to use in the - * exponential backoff for operation retries. Defaults to 100 ms for all services except - * DynamoDB, where it defaults to 50ms. - * - * * **customBackoff ** [function] — A custom function that accepts a - * retry count and error and returns the amount of time to delay in - * milliseconds. If the result is a non-zero negative value, no further - * retry attempts will be made. The `base` option will be ignored if this - * option is supplied. The function is only called for retryable errors. - * - * @!attribute httpOptions - * @return [map] A set of options to pass to the low-level HTTP request. - * Currently supported options are: - * - * * **proxy** [String] — the URL to proxy requests through - * * **agent** [http.Agent, https.Agent] — the Agent object to perform - * HTTP requests with. Used for connection pooling. Note that for - * SSL connections, a special Agent object is used in order to enable - * peer certificate verification. This feature is only supported in the - * Node.js environment. - * * **connectTimeout** [Integer] — Sets the socket to timeout after - * failing to establish a connection with the server after - * `connectTimeout` milliseconds. This timeout has no effect once a socket - * connection has been established. 
- * * **timeout** [Integer] — The number of milliseconds a request can - * take before automatically being terminated. - * Defaults to two minutes (120000). - * * **xhrAsync** [Boolean] — Whether the SDK will send asynchronous - * HTTP requests. Used in the browser environment only. Set to false to - * send requests synchronously. Defaults to true (async on). - * * **xhrWithCredentials** [Boolean] — Sets the "withCredentials" - * property of an XMLHttpRequest object. Used in the browser environment - * only. Defaults to false. - * @!attribute logger - * @return [#write,#log] an object that responds to .write() (like a stream) - * or .log() (like the console object) in order to log information about - * requests - * - * @!attribute systemClockOffset - * @return [Number] an offset value in milliseconds to apply to all signing - * times. Use this to compensate for clock skew when your system may be - * out of sync with the service time. Note that this configuration option - * can only be applied to the global `AWS.config` object and cannot be - * overridden in service-specific configuration. Defaults to 0 milliseconds. - * - * @!attribute signatureVersion - * @return [String] the signature version to sign requests with (overriding - * the API configuration). Possible values are: 'v2', 'v3', 'v4'. - * - * @!attribute signatureCache - * @return [Boolean] whether the signature to sign requests with (overriding - * the API configuration) is cached. Only applies to the signature version 'v4'. - * Defaults to `true`. - * - * @!attribute endpointDiscoveryEnabled - * @return [Boolean|undefined] whether to call operations with endpoints - * given by service dynamically. Setting this config to `true` will enable - * endpoint discovery for all applicable operations. Setting it to `false` - * will explicitly disable endpoint discovery even though operations that - * require endpoint discovery will presumably fail. Leaving it to - * `undefined` means SDK only do endpoint discovery when it's required. - * Defaults to `undefined` - * - * @!attribute endpointCacheSize - * @return [Number] the size of the global cache storing endpoints from endpoint - * discovery operations. Once endpoint cache is created, updating this setting - * cannot change existing cache size. - * Defaults to 1000 - * - * @!attribute hostPrefixEnabled - * @return [Boolean] whether to marshal request parameters to the prefix of - * hostname. Defaults to `true`. - * - * @!attribute stsRegionalEndpoints - * @return ['legacy'|'regional'] whether to send sts request to global endpoints or - * regional endpoints. - * Defaults to 'legacy'. - * - * @!attribute useFipsEndpoint - * @return [Boolean] Enables FIPS compatible endpoints. Defaults to `false`. - * - * @!attribute useDualstackEndpoint - * @return [Boolean] Enables IPv6 dualstack endpoint. Defaults to `false`. - */ -AWS.Config = AWS.util.inherit({ - /** - * @!endgroup - */ - - /** - * Creates a new configuration object. This is the object that passes - * option data along to service requests, including credentials, security, - * region information, and some service specific settings. - * - * @example Creating a new configuration object with credentials and region - * var config = new AWS.Config({ - * accessKeyId: 'AKID', secretAccessKey: 'SECRET', region: 'us-west-2' - * }); - * @option options accessKeyId [String] your AWS access key ID. - * @option options secretAccessKey [String] your AWS secret access key. 
- * @option options sessionToken [AWS.Credentials] the optional AWS - * session token to sign requests with. - * @option options credentials [AWS.Credentials] the AWS credentials - * to sign requests with. You can either specify this object, or - * specify the accessKeyId and secretAccessKey options directly. - * @option options credentialProvider [AWS.CredentialProviderChain] the - * provider chain used to resolve credentials if no static `credentials` - * property is set. - * @option options region [String] the region to send service requests to. - * See {region} for more information. - * @option options maxRetries [Integer] the maximum amount of retries to - * attempt with a request. See {maxRetries} for more information. - * @option options maxRedirects [Integer] the maximum amount of redirects to - * follow with a request. See {maxRedirects} for more information. - * @option options sslEnabled [Boolean] whether to enable SSL for - * requests. - * @option options paramValidation [Boolean|map] whether input parameters - * should be validated against the operation description before sending - * the request. Defaults to true. Pass a map to enable any of the - * following specific validation features: - * - * * **min** [Boolean] — Validates that a value meets the min - * constraint. This is enabled by default when paramValidation is set - * to `true`. - * * **max** [Boolean] — Validates that a value meets the max - * constraint. - * * **pattern** [Boolean] — Validates that a string value matches a - * regular expression. - * * **enum** [Boolean] — Validates that a string value matches one - * of the allowable enum values. - * @option options computeChecksums [Boolean] whether to compute checksums - * for payload bodies when the service accepts it (currently supported - * in S3 only) - * @option options convertResponseTypes [Boolean] whether types are converted - * when parsing response data. Currently only supported for JSON based - * services. Turning this off may improve performance on large response - * payloads. Defaults to `true`. - * @option options correctClockSkew [Boolean] whether to apply a clock skew - * correction and retry requests that fail because of an skewed client - * clock. Defaults to `false`. - * @option options s3ForcePathStyle [Boolean] whether to force path - * style URLs for S3 objects. - * @option options s3BucketEndpoint [Boolean] whether the provided endpoint - * addresses an individual bucket (false if it addresses the root API - * endpoint). Note that setting this configuration option requires an - * `endpoint` to be provided explicitly to the service constructor. - * @option options s3DisableBodySigning [Boolean] whether S3 body signing - * should be disabled when using signature version `v4`. Body signing - * can only be disabled when using https. Defaults to `true`. - * @option options s3UsEast1RegionalEndpoint ['legacy'|'regional'] when region - * is set to 'us-east-1', whether to send s3 request to global endpoints or - * 'us-east-1' regional endpoints. This config is only applicable to S3 client. - * Defaults to `legacy` - * @option options s3UseArnRegion [Boolean] whether to override the request region - * with the region inferred from requested resource's ARN. Only available for S3 buckets - * Defaults to `true` - * - * @option options retryDelayOptions [map] A set of options to configure - * the retry delay on retryable errors. 
Currently supported options are: - * - * * **base** [Integer] — The base number of milliseconds to use in the - * exponential backoff for operation retries. Defaults to 100 ms for all - * services except DynamoDB, where it defaults to 50ms. - * * **customBackoff ** [function] — A custom function that accepts a - * retry count and error and returns the amount of time to delay in - * milliseconds. If the result is a non-zero negative value, no further - * retry attempts will be made. The `base` option will be ignored if this - * option is supplied. The function is only called for retryable errors. - * @option options httpOptions [map] A set of options to pass to the low-level - * HTTP request. Currently supported options are: - * - * * **proxy** [String] — the URL to proxy requests through - * * **agent** [http.Agent, https.Agent] — the Agent object to perform - * HTTP requests with. Used for connection pooling. Defaults to the global - * agent (`http.globalAgent`) for non-SSL connections. Note that for - * SSL connections, a special Agent object is used in order to enable - * peer certificate verification. This feature is only available in the - * Node.js environment. - * * **connectTimeout** [Integer] — Sets the socket to timeout after - * failing to establish a connection with the server after - * `connectTimeout` milliseconds. This timeout has no effect once a socket - * connection has been established. - * * **timeout** [Integer] — Sets the socket to timeout after timeout - * milliseconds of inactivity on the socket. Defaults to two minutes - * (120000). - * * **xhrAsync** [Boolean] — Whether the SDK will send asynchronous - * HTTP requests. Used in the browser environment only. Set to false to - * send requests synchronously. Defaults to true (async on). - * * **xhrWithCredentials** [Boolean] — Sets the "withCredentials" - * property of an XMLHttpRequest object. Used in the browser environment - * only. Defaults to false. - * @option options apiVersion [String, Date] a String in YYYY-MM-DD format - * (or a date) that represents the latest possible API version that can be - * used in all services (unless overridden by `apiVersions`). Specify - * 'latest' to use the latest possible version. - * @option options apiVersions [map] a map of service - * identifiers (the lowercase service class name) with the API version to - * use when instantiating a service. Specify 'latest' for each individual - * that can use the latest available version. - * @option options logger [#write,#log] an object that responds to .write() - * (like a stream) or .log() (like the console object) in order to log - * information about requests - * @option options systemClockOffset [Number] an offset value in milliseconds - * to apply to all signing times. Use this to compensate for clock skew - * when your system may be out of sync with the service time. Note that - * this configuration option can only be applied to the global `AWS.config` - * object and cannot be overridden in service-specific configuration. - * Defaults to 0 milliseconds. - * @option options signatureVersion [String] the signature version to sign - * requests with (overriding the API configuration). Possible values are: - * 'v2', 'v3', 'v4'. - * @option options signatureCache [Boolean] whether the signature to sign - * requests with (overriding the API configuration) is cached. Only applies - * to the signature version 'v4'. Defaults to `true`. 
- * @option options dynamoDbCrc32 [Boolean] whether to validate the CRC32 - * checksum of HTTP response bodies returned by DynamoDB. Default: `true`. - * @option options useAccelerateEndpoint [Boolean] Whether to use the - * S3 Transfer Acceleration endpoint with the S3 service. Default: `false`. - * @option options clientSideMonitoring [Boolean] whether to collect and - * publish this client's performance metrics of all its API requests. - * @option options endpointDiscoveryEnabled [Boolean|undefined] whether to - * call operations with endpoints given by service dynamically. Setting this - * config to `true` will enable endpoint discovery for all applicable operations. - * Setting it to `false` will explicitly disable endpoint discovery even though - * operations that require endpoint discovery will presumably fail. Leaving it - * to `undefined` means SDK will only do endpoint discovery when it's required. - * Defaults to `undefined` - * @option options endpointCacheSize [Number] the size of the global cache storing - * endpoints from endpoint discovery operations. Once endpoint cache is created, - * updating this setting cannot change existing cache size. - * Defaults to 1000 - * @option options hostPrefixEnabled [Boolean] whether to marshal request - * parameters to the prefix of hostname. - * Defaults to `true`. - * @option options stsRegionalEndpoints ['legacy'|'regional'] whether to send sts request - * to global endpoints or regional endpoints. - * Defaults to 'legacy'. - * @option options useFipsEndpoint [Boolean] Enables FIPS compatible endpoints. - * Defaults to `false`. - * @option options useDualstackEndpoint [Boolean] Enables IPv6 dualstack endpoint. - * Defaults to `false`. - */ - constructor: function Config(options) { - if (options === undefined) options = {}; - options = this.extractCredentials(options); - - AWS.util.each.call(this, this.keys, function (key, value) { - this.set(key, options[key], value); - }); - }, +"use strict"; - /** - * @!group Managing Credentials - */ - - /** - * Loads credentials from the configuration object. This is used internally - * by the SDK to ensure that refreshable {Credentials} objects are properly - * refreshed and loaded when sending a request. If you want to ensure that - * your credentials are loaded prior to a request, you can use this method - * directly to provide accurate credential data stored in the object. - * - * @note If you configure the SDK with static or environment credentials, - * the credential data should already be present in {credentials} attribute. - * This method is primarily necessary to load credentials from asynchronous - * sources, or sources that can refresh credentials periodically. - * @example Getting your access key - * AWS.config.getCredentials(function(err) { - * if (err) console.log(err.stack); // credentials not loaded - * else console.log("Access Key:", AWS.config.credentials.accessKeyId); - * }) - * @callback callback function(err) - * Called when the {credentials} have been properly set on the configuration - * object. - * - * @param err [Error] if this is set, credentials were not successfully - * loaded and this error provides information why. - * @see credentials - * @see Credentials - */ - getCredentials: function getCredentials(callback) { - var self = this; - - function finish(err) { - callback(err, err ? 
null : self.credentials); - } - - function credError(msg, err) { - return new AWS.util.error(err || new Error(), { - code: 'CredentialsError', - message: msg, - name: 'CredentialsError' - }); - } - - function getAsyncCredentials() { - self.credentials.get(function(err) { - if (err) { - var msg = 'Could not load credentials from ' + - self.credentials.constructor.name; - err = credError(msg, err); - } - finish(err); - }); - } - - function getStaticCredentials() { - var err = null; - if (!self.credentials.accessKeyId || !self.credentials.secretAccessKey) { - err = credError('Missing credentials'); - } - finish(err); - } +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(54766), exports); - if (self.credentials) { - if (typeof self.credentials.get === 'function') { - getAsyncCredentials(); - } else { // static credentials - getStaticCredentials(); - } - } else if (self.credentialProvider) { - self.credentialProvider.resolve(function(err, creds) { - if (err) { - err = credError('Could not load credentials from any providers', err); - } - self.credentials = creds; - finish(err); - }); - } else { - finish(credError('No credentials to load')); - } - }, - /** - * Loads token from the configuration object. This is used internally - * by the SDK to ensure that refreshable {Token} objects are properly - * refreshed and loaded when sending a request. If you want to ensure that - * your token is loaded prior to a request, you can use this method - * directly to provide accurate token data stored in the object. - * - * @note If you configure the SDK with static token, the token data should - * already be present in {token} attribute. This method is primarily necessary - * to load token from asynchronous sources, or sources that can refresh - * token periodically. - * @example Getting your access token - * AWS.config.getToken(function(err) { - * if (err) console.log(err.stack); // token not loaded - * else console.log("Token:", AWS.config.token.token); - * }) - * @callback callback function(err) - * Called when the {token} have been properly set on the configuration object. - * - * @param err [Error] if this is set, token was not successfully loaded and - * this error provides information why. - * @see token - */ - getToken: function getToken(callback) { - var self = this; - - function finish(err) { - callback(err, err ? 
null : self.token); - } - - function tokenError(msg, err) { - return new AWS.util.error(err || new Error(), { - code: 'TokenError', - message: msg, - name: 'TokenError' - }); - } - - function getAsyncToken() { - self.token.get(function(err) { - if (err) { - var msg = 'Could not load token from ' + - self.token.constructor.name; - err = tokenError(msg, err); - } - finish(err); - }); - } - - function getStaticToken() { - var err = null; - if (!self.token.token) { - err = tokenError('Missing token'); - } - finish(err); - } +/***/ }), - if (self.token) { - if (typeof self.token.get === 'function') { - getAsyncToken(); - } else { // static token - getStaticToken(); - } - } else if (self.tokenProvider) { - self.tokenProvider.resolve(function(err, token) { - if (err) { - err = tokenError('Could not load token from any providers', err); - } - self.token = token; - finish(err); - }); - } else { - finish(tokenError('No token to load')); - } - }, +/***/ 33946: +/***/ ((__unused_webpack_module, exports) => { - /** - * @!group Loading and Setting Configuration Options - */ - - /** - * @overload update(options, allowUnknownKeys = false) - * Updates the current configuration object with new options. - * - * @example Update maxRetries property of a configuration object - * config.update({maxRetries: 10}); - * @param [Object] options a map of option keys and values. - * @param [Boolean] allowUnknownKeys whether unknown keys can be set on - * the configuration object. Defaults to `false`. - * @see constructor - */ - update: function update(options, allowUnknownKeys) { - allowUnknownKeys = allowUnknownKeys || false; - options = this.extractCredentials(options); - AWS.util.each.call(this, options, function (key, value) { - if (allowUnknownKeys || Object.prototype.hasOwnProperty.call(this.keys, key) || - AWS.Service.hasService(key)) { - this.set(key, value); - } - }); - }, +"use strict"; - /** - * Loads configuration data from a JSON file into this config object. - * @note Loading configuration will reset all existing configuration - * on the object. - * @!macro nobrowser - * @param path [String] the path relative to your process's current - * working directory to load configuration from. 
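// getTransformedHeaders above flattens Node's raw response headers (where a header such
// as set-cookie may arrive as a string[]) into the single-string map used on the SDK's
// HttpResponse; a standalone sketch of the same logic (not imported from the bundle):
const flattenHeaders = (headers) => {
  const out = {};
  for (const name of Object.keys(headers)) {
    const value = headers[name];
    out[name] = Array.isArray(value) ? value.join(",") : value;
  }
  return out;
};
console.log(flattenHeaders({ "set-cookie": ["a=1", "b=2"], "content-type": "text/plain" }));
// -> { "set-cookie": "a=1,b=2", "content-type": "text/plain" }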
- * @return [AWS.Config] the same configuration object - */ - loadFromPath: function loadFromPath(path) { - this.clear(); - - var options = JSON.parse(AWS.util.readFileSync(path)); - var fileSystemCreds = new AWS.FileSystemCredentials(path); - var chain = new AWS.CredentialProviderChain(); - chain.providers.unshift(fileSystemCreds); - chain.resolve(function (err, creds) { - if (err) throw err; - else options.credentials = creds; - }); +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.NODEJS_TIMEOUT_ERROR_CODES = void 0; +exports.NODEJS_TIMEOUT_ERROR_CODES = ["ECONNRESET", "EPIPE", "ETIMEDOUT"]; - this.constructor(options); - return this; - }, +/***/ }), - /** - * Clears configuration data on this object - * - * @api private - */ - clear: function clear() { - /*jshint forin:false */ - AWS.util.each.call(this, this.keys, function (key) { - delete this[key]; - }); +/***/ 70508: +/***/ ((__unused_webpack_module, exports) => { - // reset credential provider - this.set('credentials', undefined); - this.set('credentialProvider', undefined); - }, +"use strict"; - /** - * Sets a property on the configuration object, allowing for a - * default value - * @api private - */ - set: function set(property, value, defaultValue) { - if (value === undefined) { - if (defaultValue === undefined) { - defaultValue = this.keys[property]; - } - if (typeof defaultValue === 'function') { - this[property] = defaultValue.call(this); - } else { - this[property] = defaultValue; - } - } else if (property === 'httpOptions' && this[property]) { - // deep merge httpOptions - this[property] = AWS.util.merge(this[property], value); - } else { - this[property] = value; - } - }, +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getTransformedHeaders = void 0; +const getTransformedHeaders = (headers) => { + const transformedHeaders = {}; + for (const name of Object.keys(headers)) { + const headerValues = headers[name]; + transformedHeaders[name] = Array.isArray(headerValues) ? headerValues.join(",") : headerValues; + } + return transformedHeaders; +}; +exports.getTransformedHeaders = getTransformedHeaders; - /** - * All of the keys with their default values. - * - * @constant - * @api private - */ - keys: { - credentials: null, - credentialProvider: null, - region: null, - logger: null, - apiVersions: {}, - apiVersion: null, - endpoint: undefined, - httpOptions: { - timeout: 120000 - }, - maxRetries: undefined, - maxRedirects: 10, - paramValidation: true, - sslEnabled: true, - s3ForcePathStyle: false, - s3BucketEndpoint: false, - s3DisableBodySigning: true, - s3UsEast1RegionalEndpoint: 'legacy', - s3UseArnRegion: undefined, - computeChecksums: true, - convertResponseTypes: true, - correctClockSkew: false, - customUserAgent: null, - dynamoDbCrc32: true, - systemClockOffset: 0, - signatureVersion: null, - signatureCache: true, - retryDelayOptions: {}, - useAccelerateEndpoint: false, - clientSideMonitoring: false, - endpointDiscoveryEnabled: undefined, - endpointCacheSize: 1000, - hostPrefixEnabled: true, - stsRegionalEndpoints: 'legacy', - useFipsEndpoint: false, - useDualstackEndpoint: false, - token: null - }, - /** - * Extracts accessKeyId, secretAccessKey and sessionToken - * from a configuration hash. 
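// loadFromPath, shown above, resets the configuration from a JSON file on disk
// (Node.js only); a minimal sketch assuming ./aws-config.json contains
//   { "accessKeyId": "AKID", "secretAccessKey": "SECRET", "region": "us-west-2" }
// (the file name and its contents are illustrative):
var AWS = require('aws-sdk');
AWS.config.loadFromPath('./aws-config.json');
console.log(AWS.config.region); // "us-west-2"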
- * - * @api private - */ - extractCredentials: function extractCredentials(options) { - if (options.accessKeyId && options.secretAccessKey) { - options = AWS.util.copy(options); - options.credentials = new AWS.Credentials(options); - } - return options; - }, +/***/ }), - /** - * Sets the promise dependency the SDK will use wherever Promises are returned. - * Passing `null` will force the SDK to use native Promises if they are available. - * If native Promises are not available, passing `null` will have no effect. - * @param [Constructor] dep A reference to a Promise constructor - */ - setPromisesDependency: function setPromisesDependency(dep) { - PromisesDependency = dep; - // if null was passed in, we should try to use native promises - if (dep === null && typeof Promise === 'function') { - PromisesDependency = Promise; - } - var constructors = [AWS.Request, AWS.Credentials, AWS.CredentialProviderChain]; - if (AWS.S3) { - constructors.push(AWS.S3); - if (AWS.S3.ManagedUpload) { - constructors.push(AWS.S3.ManagedUpload); - } - } - AWS.util.addPromises(constructors, PromisesDependency); - }, +/***/ 20258: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - /** - * Gets the promise dependency set by `AWS.config.setPromisesDependency`. - */ - getPromisesDependency: function getPromisesDependency() { - return PromisesDependency; - } -}); +"use strict"; -/** - * @return [AWS.Config] The global configuration object singleton instance - * @readonly - * @see AWS.Config - */ -AWS.config = new AWS.Config(); +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(96948), exports); +tslib_1.__exportStar(__nccwpck_require__(46999), exports); +tslib_1.__exportStar(__nccwpck_require__(81030), exports); /***/ }), -/***/ 85566: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 96948: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -var AWS = __nccwpck_require__(28437); -/** - * @api private - */ -function validateRegionalEndpointsFlagValue(configValue, errorOptions) { - if (typeof configValue !== 'string') return undefined; - else if (['legacy', 'regional'].indexOf(configValue.toLowerCase()) >= 0) { - return configValue.toLowerCase(); - } else { - throw AWS.util.error(new Error(), errorOptions); - } -} +"use strict"; -/** - * Resolve the configuration value for regional endpoint from difference sources: client - * config, environmental variable, shared config file. Value can be case-insensitive - * 'legacy' or 'reginal'. - * @param originalConfig user-supplied config object to resolve - * @param options a map of config property names from individual configuration source - * - env: name of environmental variable that refers to the config - * - sharedConfig: name of shared configuration file property that refers to the config - * - clientConfig: name of client configuration property that refers to the config - * - * @api private - */ -function resolveRegionalEndpointsFlag(originalConfig, options) { - originalConfig = originalConfig || {}; - //validate config value - var resolved; - if (originalConfig[options.clientConfig]) { - resolved = validateRegionalEndpointsFlagValue(originalConfig[options.clientConfig], { - code: 'InvalidConfiguration', - message: 'invalid "' + options.clientConfig + '" configuration. Expect "legacy" ' + - ' or "regional". Got "' + originalConfig[options.clientConfig] + '".' 
- }); - if (resolved) return resolved; - } - if (!AWS.util.isNode()) return resolved; - //validate environmental variable - if (Object.prototype.hasOwnProperty.call(process.env, options.env)) { - var envFlag = process.env[options.env]; - resolved = validateRegionalEndpointsFlagValue(envFlag, { - code: 'InvalidEnvironmentalVariable', - message: 'invalid ' + options.env + ' environmental variable. Expect "legacy" ' + - ' or "regional". Got "' + process.env[options.env] + '".' - }); - if (resolved) return resolved; - } - //validate shared config file - var profile = {}; - try { - var profiles = AWS.util.getProfilesFromSharedConfig(AWS.util.iniLoader); - profile = profiles[process.env.AWS_PROFILE || AWS.util.defaultProfile]; - } catch (e) {}; - if (profile && Object.prototype.hasOwnProperty.call(profile, options.sharedConfig)) { - var fileFlag = profile[options.sharedConfig]; - resolved = validateRegionalEndpointsFlagValue(fileFlag, { - code: 'InvalidConfiguration', - message: 'invalid ' + options.sharedConfig + ' profile config. Expect "legacy" ' + - ' or "regional". Got "' + profile[options.sharedConfig] + '".' - }); - if (resolved) return resolved; - } - return resolved; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.NodeHttpHandler = exports.DEFAULT_REQUEST_TIMEOUT = void 0; +const protocol_http_1 = __nccwpck_require__(64418); +const querystring_builder_1 = __nccwpck_require__(68031); +const http_1 = __nccwpck_require__(13685); +const https_1 = __nccwpck_require__(95687); +const constants_1 = __nccwpck_require__(33946); +const get_transformed_headers_1 = __nccwpck_require__(70508); +const set_connection_timeout_1 = __nccwpck_require__(25545); +const set_socket_keep_alive_1 = __nccwpck_require__(83751); +const set_socket_timeout_1 = __nccwpck_require__(42618); +const write_request_body_1 = __nccwpck_require__(73766); +exports.DEFAULT_REQUEST_TIMEOUT = 0; +class NodeHttpHandler { + constructor(options) { + this.metadata = { handlerProtocol: "http/1.1" }; + this.configProvider = new Promise((resolve, reject) => { + if (typeof options === "function") { + options() + .then((_options) => { + resolve(this.resolveDefaultConfig(_options)); + }) + .catch(reject); + } + else { + resolve(this.resolveDefaultConfig(options)); + } + }); + } + resolveDefaultConfig(options) { + const { requestTimeout, connectionTimeout, socketTimeout, httpAgent, httpsAgent } = options || {}; + const keepAlive = true; + const maxSockets = 50; + return { + connectionTimeout, + requestTimeout: requestTimeout !== null && requestTimeout !== void 0 ? requestTimeout : socketTimeout, + httpAgent: httpAgent || new http_1.Agent({ keepAlive, maxSockets }), + httpsAgent: httpsAgent || new https_1.Agent({ keepAlive, maxSockets }), + }; + } + destroy() { + var _a, _b, _c, _d; + (_b = (_a = this.config) === null || _a === void 0 ? void 0 : _a.httpAgent) === null || _b === void 0 ? void 0 : _b.destroy(); + (_d = (_c = this.config) === null || _c === void 0 ? void 0 : _c.httpsAgent) === null || _d === void 0 ? 
void 0 : _d.destroy(); + } + async handle(request, { abortSignal } = {}) { + if (!this.config) { + this.config = await this.configProvider; + } + return new Promise((_resolve, _reject) => { + var _a, _b; + let writeRequestBodyPromise = undefined; + const resolve = async (arg) => { + await writeRequestBodyPromise; + _resolve(arg); + }; + const reject = async (arg) => { + await writeRequestBodyPromise; + _reject(arg); + }; + if (!this.config) { + throw new Error("Node HTTP request handler config is not resolved"); + } + if (abortSignal === null || abortSignal === void 0 ? void 0 : abortSignal.aborted) { + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + reject(abortError); + return; + } + const isSSL = request.protocol === "https:"; + const queryString = (0, querystring_builder_1.buildQueryString)(request.query || {}); + let auth = undefined; + if (request.username != null || request.password != null) { + const username = (_a = request.username) !== null && _a !== void 0 ? _a : ""; + const password = (_b = request.password) !== null && _b !== void 0 ? _b : ""; + auth = `${username}:${password}`; + } + let path = request.path; + if (queryString) { + path += `?${queryString}`; + } + if (request.fragment) { + path += `#${request.fragment}`; + } + const nodeHttpsOptions = { + headers: request.headers, + host: request.hostname, + method: request.method, + path, + port: request.port, + agent: isSSL ? this.config.httpsAgent : this.config.httpAgent, + auth, + }; + const requestFunc = isSSL ? https_1.request : http_1.request; + const req = requestFunc(nodeHttpsOptions, (res) => { + const httpResponse = new protocol_http_1.HttpResponse({ + statusCode: res.statusCode || -1, + reason: res.statusMessage, + headers: (0, get_transformed_headers_1.getTransformedHeaders)(res.headers), + body: res, + }); + resolve({ response: httpResponse }); + }); + req.on("error", (err) => { + if (constants_1.NODEJS_TIMEOUT_ERROR_CODES.includes(err.code)) { + reject(Object.assign(err, { name: "TimeoutError" })); + } + else { + reject(err); + } + }); + (0, set_connection_timeout_1.setConnectionTimeout)(req, reject, this.config.connectionTimeout); + (0, set_socket_timeout_1.setSocketTimeout)(req, reject, this.config.requestTimeout); + if (abortSignal) { + abortSignal.onabort = () => { + req.abort(); + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + reject(abortError); + }; + } + const httpAgent = nodeHttpsOptions.agent; + if (typeof httpAgent === "object" && "keepAlive" in httpAgent) { + (0, set_socket_keep_alive_1.setSocketKeepAlive)(req, { + keepAlive: httpAgent.keepAlive, + keepAliveMsecs: httpAgent.keepAliveMsecs, + }); + } + writeRequestBodyPromise = (0, write_request_body_1.writeRequestBody)(req, request, this.config.requestTimeout).catch(_reject); + }); + } } - -module.exports = resolveRegionalEndpointsFlag; +exports.NodeHttpHandler = NodeHttpHandler; /***/ }), -/***/ 28437: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -/** - * The main AWS namespace - */ -var AWS = { util: __nccwpck_require__(77985) }; - -/** - * @api private - * @!macro [new] nobrowser - * @note This feature is not supported in the browser environment of the SDK. 
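// The NodeHttpHandler added above is what v3 clients use for HTTP/1.1 requests; its
// connectionTimeout, requestTimeout, and agent options are supplied through a client's
// requestHandler. A sketch assuming the handler is consumed from the
// @smithy/node-http-handler package and paired with the S3 client (package names and
// values are illustrative):
const { NodeHttpHandler } = require("@smithy/node-http-handler");
const { S3Client } = require("@aws-sdk/client-s3");
const { Agent } = require("https");

const client = new S3Client({
  requestHandler: new NodeHttpHandler({
    connectionTimeout: 3000,                                   // ms to establish the socket
    requestTimeout: 30000,                                     // ms of socket inactivity
    httpsAgent: new Agent({ keepAlive: true, maxSockets: 25 }),
  }),
});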
- */ -var _hidden = {}; _hidden.toString(); // hack to parse macro - -/** - * @api private - */ -module.exports = AWS; - -AWS.util.update(AWS, { - - /** - * @constant - */ - VERSION: '2.1396.0', - - /** - * @api private - */ - Signers: {}, - - /** - * @api private - */ - Protocol: { - Json: __nccwpck_require__(30083), - Query: __nccwpck_require__(90761), - Rest: __nccwpck_require__(98200), - RestJson: __nccwpck_require__(5883), - RestXml: __nccwpck_require__(15143) - }, +/***/ 5771: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - /** - * @api private - */ - XML: { - Builder: __nccwpck_require__(23546), - Parser: null // conditionally set based on environment - }, +"use strict"; - /** - * @api private - */ - JSON: { - Builder: __nccwpck_require__(47495), - Parser: __nccwpck_require__(5474) - }, +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.NodeHttp2ConnectionManager = void 0; +const tslib_1 = __nccwpck_require__(4351); +const http2_1 = tslib_1.__importDefault(__nccwpck_require__(85158)); +const node_http2_connection_pool_1 = __nccwpck_require__(95157); +class NodeHttp2ConnectionManager { + constructor(config) { + this.sessionCache = new Map(); + this.config = config; + if (this.config.maxConcurrency && this.config.maxConcurrency <= 0) { + throw new RangeError("maxConcurrency must be greater than zero."); + } + } + lease(requestContext, connectionConfiguration) { + const url = this.getUrlString(requestContext); + const existingPool = this.sessionCache.get(url); + if (existingPool) { + const existingSession = existingPool.poll(); + if (existingSession && !this.config.disableConcurrency) { + return existingSession; + } + } + const session = http2_1.default.connect(url); + if (this.config.maxConcurrency) { + session.settings({ maxConcurrentStreams: this.config.maxConcurrency }, (err) => { + if (err) { + throw new Error("Fail to set maxConcurrentStreams to " + + this.config.maxConcurrency + + "when creating new session for " + + requestContext.destination.toString()); + } + }); + } + session.unref(); + const destroySessionCb = () => { + session.destroy(); + this.deleteSession(url, session); + }; + session.on("goaway", destroySessionCb); + session.on("error", destroySessionCb); + session.on("frameError", destroySessionCb); + session.on("close", () => this.deleteSession(url, session)); + if (connectionConfiguration.requestTimeout) { + session.setTimeout(connectionConfiguration.requestTimeout, destroySessionCb); + } + const connectionPool = this.sessionCache.get(url) || new node_http2_connection_pool_1.NodeHttp2ConnectionPool(); + connectionPool.offerLast(session); + this.sessionCache.set(url, connectionPool); + return session; + } + deleteSession(authority, session) { + const existingConnectionPool = this.sessionCache.get(authority); + if (!existingConnectionPool) { + return; + } + if (!existingConnectionPool.contains(session)) { + return; + } + existingConnectionPool.remove(session); + this.sessionCache.set(authority, existingConnectionPool); + } + release(requestContext, session) { + var _a; + const cacheKey = this.getUrlString(requestContext); + (_a = this.sessionCache.get(cacheKey)) === null || _a === void 0 ? 
void 0 : _a.offerLast(session); + } + destroy() { + for (const [key, connectionPool] of this.sessionCache) { + for (const session of connectionPool) { + if (!session.destroyed) { + session.destroy(); + } + connectionPool.remove(session); + } + this.sessionCache.delete(key); + } + } + setMaxConcurrentStreams(maxConcurrentStreams) { + if (this.config.maxConcurrency && this.config.maxConcurrency <= 0) { + throw new RangeError("maxConcurrentStreams must be greater than zero."); + } + this.config.maxConcurrency = maxConcurrentStreams; + } + setDisableConcurrentStreams(disableConcurrentStreams) { + this.config.disableConcurrency = disableConcurrentStreams; + } + getUrlString(request) { + return request.destination.toString(); + } +} +exports.NodeHttp2ConnectionManager = NodeHttp2ConnectionManager; - /** - * @api private - */ - Model: { - Api: __nccwpck_require__(17657), - Operation: __nccwpck_require__(28083), - Shape: __nccwpck_require__(71349), - Paginator: __nccwpck_require__(45938), - ResourceWaiter: __nccwpck_require__(41368) - }, - /** - * @api private - */ - apiLoader: __nccwpck_require__(52793), +/***/ }), - /** - * @api private - */ - EndpointCache: (__nccwpck_require__(96323)/* .EndpointCache */ .$) -}); -__nccwpck_require__(55948); -__nccwpck_require__(68903); -__nccwpck_require__(38110); -__nccwpck_require__(1556); -__nccwpck_require__(54995); -__nccwpck_require__(78652); -__nccwpck_require__(58743); -__nccwpck_require__(39925); -__nccwpck_require__(9897); -__nccwpck_require__(99127); -__nccwpck_require__(93985); +/***/ 95157: +/***/ ((__unused_webpack_module, exports) => { -/** - * @readonly - * @return [AWS.SequentialExecutor] a collection of global event listeners that - * are attached to every sent request. - * @see AWS.Request AWS.Request for a list of events to listen for - * @example Logging the time taken to send a request - * AWS.events.on('send', function startSend(resp) { - * resp.startTime = new Date().getTime(); - * }).on('complete', function calculateTime(resp) { - * var time = (new Date().getTime() - resp.startTime) / 1000; - * console.log('Request took ' + time + ' seconds'); - * }); - * - * new AWS.S3().listBuckets(); // prints 'Request took 0.285 seconds' - */ -AWS.events = new AWS.SequentialExecutor(); +"use strict"; -//create endpoint cache lazily -AWS.util.memoizedProperty(AWS, 'endpointCache', function() { - return new AWS.EndpointCache(AWS.config.endpointCacheSize); -}, true); +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.NodeHttp2ConnectionPool = void 0; +class NodeHttp2ConnectionPool { + constructor(sessions) { + this.sessions = []; + this.sessions = sessions !== null && sessions !== void 0 ? 
sessions : []; + } + poll() { + if (this.sessions.length > 0) { + return this.sessions.shift(); + } + } + offerLast(session) { + this.sessions.push(session); + } + contains(session) { + return this.sessions.includes(session); + } + remove(session) { + this.sessions = this.sessions.filter((s) => s !== session); + } + [Symbol.iterator]() { + return this.sessions[Symbol.iterator](); + } + destroy(connection) { + for (const session of this.sessions) { + if (session === connection) { + if (!session.destroyed) { + session.destroy(); + } + } + } + } +} +exports.NodeHttp2ConnectionPool = NodeHttp2ConnectionPool; /***/ }), -/***/ 53819: -/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 46999: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -var AWS = __nccwpck_require__(28437); +"use strict"; -/** - * Represents your AWS security credentials, specifically the - * {accessKeyId}, {secretAccessKey}, and optional {sessionToken}. - * Creating a `Credentials` object allows you to pass around your - * security information to configuration and service objects. - * - * Note that this class typically does not need to be constructed manually, - * as the {AWS.Config} and {AWS.Service} classes both accept simple - * options hashes with the three keys. These structures will be converted - * into Credentials objects automatically. - * - * ## Expiring and Refreshing Credentials - * - * Occasionally credentials can expire in the middle of a long-running - * application. In this case, the SDK will automatically attempt to - * refresh the credentials from the storage location if the Credentials - * class implements the {refresh} method. - * - * If you are implementing a credential storage location, you - * will want to create a subclass of the `Credentials` class and - * override the {refresh} method. This method allows credentials to be - * retrieved from the backing store, be it a file system, database, or - * some network storage. The method should reset the credential attributes - * on the object. - * - * @!attribute expired - * @return [Boolean] whether the credentials have been expired and - * require a refresh. Used in conjunction with {expireTime}. - * @!attribute expireTime - * @return [Date] a time when credentials should be considered expired. Used - * in conjunction with {expired}. - * @!attribute accessKeyId - * @return [String] the AWS access key ID - * @!attribute secretAccessKey - * @return [String] the AWS secret access key - * @!attribute sessionToken - * @return [String] an optional AWS session token - */ -AWS.Credentials = AWS.util.inherit({ - /** - * A credentials object can be created using positional arguments or an options - * hash. - * - * @overload AWS.Credentials(accessKeyId, secretAccessKey, sessionToken=null) - * Creates a Credentials object with a given set of credential information - * as positional arguments. - * @param accessKeyId [String] the AWS access key ID - * @param secretAccessKey [String] the AWS secret access key - * @param sessionToken [String] the optional AWS session token - * @example Create a credentials object with AWS credentials - * var creds = new AWS.Credentials('akid', 'secret', 'session'); - * @overload AWS.Credentials(options) - * Creates a Credentials object with a given set of credential information - * as an options hash. 
- * @option options accessKeyId [String] the AWS access key ID - * @option options secretAccessKey [String] the AWS secret access key - * @option options sessionToken [String] the optional AWS session token - * @example Create a credentials object with AWS credentials - * var creds = new AWS.Credentials({ - * accessKeyId: 'akid', secretAccessKey: 'secret', sessionToken: 'session' - * }); - */ - constructor: function Credentials() { - // hide secretAccessKey from being displayed with util.inspect - AWS.util.hideProperties(this, ['secretAccessKey']); - - this.expired = false; - this.expireTime = null; - this.refreshCallbacks = []; - if (arguments.length === 1 && typeof arguments[0] === 'object') { - var creds = arguments[0].credentials || arguments[0]; - this.accessKeyId = creds.accessKeyId; - this.secretAccessKey = creds.secretAccessKey; - this.sessionToken = creds.sessionToken; - } else { - this.accessKeyId = arguments[0]; - this.secretAccessKey = arguments[1]; - this.sessionToken = arguments[2]; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.NodeHttp2Handler = void 0; +const protocol_http_1 = __nccwpck_require__(64418); +const querystring_builder_1 = __nccwpck_require__(68031); +const http2_1 = __nccwpck_require__(85158); +const get_transformed_headers_1 = __nccwpck_require__(70508); +const node_http2_connection_manager_1 = __nccwpck_require__(5771); +const write_request_body_1 = __nccwpck_require__(73766); +class NodeHttp2Handler { + constructor(options) { + this.metadata = { handlerProtocol: "h2" }; + this.connectionManager = new node_http2_connection_manager_1.NodeHttp2ConnectionManager({}); + this.configProvider = new Promise((resolve, reject) => { + if (typeof options === "function") { + options() + .then((opts) => { + resolve(opts || {}); + }) + .catch(reject); + } + else { + resolve(options || {}); + } + }); } - }, - - /** - * @return [Integer] the number of seconds before {expireTime} during which - * the credentials will be considered expired. - */ - expiryWindow: 15, - - /** - * @return [Boolean] whether the credentials object should call {refresh} - * @note Subclasses should override this method to provide custom refresh - * logic. - */ - needsRefresh: function needsRefresh() { - var currentTime = AWS.util.date.getDate().getTime(); - var adjustedTime = new Date(currentTime + this.expiryWindow * 1000); - - if (this.expireTime && adjustedTime > this.expireTime) { - return true; - } else { - return this.expired || !this.accessKeyId || !this.secretAccessKey; - } - }, - - /** - * Gets the existing credentials, refreshing them if they are not yet loaded - * or have expired. Users should call this method before using {refresh}, - * as this will not attempt to reload credentials when they are already - * loaded into the object. - * - * @callback callback function(err) - * When this callback is called with no error, it means either credentials - * do not need to be refreshed or refreshed credentials information has - * been loaded into the object (as the `accessKeyId`, `secretAccessKey`, - * and `sessionToken` properties). - * @param err [Error] if an error occurred, this value will be filled - */ - get: function get(callback) { - var self = this; - if (this.needsRefresh()) { - this.refresh(function(err) { - if (!err) self.expired = false; // reset expired flag - if (callback) callback(err); - }); - } else if (callback) { - callback(); + destroy() { + this.connectionManager.destroy(); } - }, - - /** - * @!method getPromise() - * Returns a 'thenable' promise. 
- * Gets the existing credentials, refreshing them if they are not yet loaded - * or have expired. Users should call this method before using {refresh}, - * as this will not attempt to reload credentials when they are already - * loaded into the object. - * - * Two callbacks can be provided to the `then` method on the returned promise. - * The first callback will be called if the promise is fulfilled, and the second - * callback will be called if the promise is rejected. - * @callback fulfilledCallback function() - * Called if the promise is fulfilled. When this callback is called, it - * means either credentials do not need to be refreshed or refreshed - * credentials information has been loaded into the object (as the - * `accessKeyId`, `secretAccessKey`, and `sessionToken` properties). - * @callback rejectedCallback function(err) - * Called if the promise is rejected. - * @param err [Error] if an error occurred, this value will be filled - * @return [Promise] A promise that represents the state of the `get` call. - * @example Calling the `getPromise` method. - * var promise = credProvider.getPromise(); - * promise.then(function() { ... }, function(err) { ... }); - */ - - /** - * @!method refreshPromise() - * Returns a 'thenable' promise. - * Refreshes the credentials. Users should call {get} before attempting - * to forcibly refresh credentials. - * - * Two callbacks can be provided to the `then` method on the returned promise. - * The first callback will be called if the promise is fulfilled, and the second - * callback will be called if the promise is rejected. - * @callback fulfilledCallback function() - * Called if the promise is fulfilled. When this callback is called, it - * means refreshed credentials information has been loaded into the object - * (as the `accessKeyId`, `secretAccessKey`, and `sessionToken` properties). - * @callback rejectedCallback function(err) - * Called if the promise is rejected. - * @param err [Error] if an error occurred, this value will be filled - * @return [Promise] A promise that represents the state of the `refresh` call. - * @example Calling the `refreshPromise` method. - * var promise = credProvider.refreshPromise(); - * promise.then(function() { ... }, function(err) { ... }); - */ - - /** - * Refreshes the credentials. Users should call {get} before attempting - * to forcibly refresh credentials. - * - * @callback callback function(err) - * When this callback is called with no error, it means refreshed - * credentials information has been loaded into the object (as the - * `accessKeyId`, `secretAccessKey`, and `sessionToken` properties). - * @param err [Error] if an error occurred, this value will be filled - * @note Subclasses should override this class to reset the - * {accessKeyId}, {secretAccessKey} and optional {sessionToken} - * on the credentials object and then call the callback with - * any error information. 
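// As described above, a custom credential source plugs in by subclassing AWS.Credentials
// and overriding refresh() to repopulate the key properties; a minimal sketch in which
// fetchKeysFromStore is a hypothetical stand-in for your own backing store:
var AWS = require('aws-sdk');

function fetchKeysFromStore(callback) {
  // placeholder: real code would read a file, database, or network store
  callback(null, {
    accessKeyId: 'AKID',
    secretAccessKey: 'SECRET',
    sessionToken: 'TOKEN',
    expiration: new Date(Date.now() + 3600 * 1000)
  });
}

function StoreCredentials() {
  AWS.Credentials.call(this);
  this.expired = true; // force an initial refresh
}
StoreCredentials.prototype = Object.create(AWS.Credentials.prototype);
StoreCredentials.prototype.refresh = function (callback) {
  var self = this;
  fetchKeysFromStore(function (err, keys) {
    if (err) return callback(err);
    self.accessKeyId = keys.accessKeyId;
    self.secretAccessKey = keys.secretAccessKey;
    self.sessionToken = keys.sessionToken;
    self.expireTime = keys.expiration; // lets needsRefresh() trigger the next reload
    self.expired = false;
    callback();
  });
};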
- * @see get - */ - refresh: function refresh(callback) { - this.expired = false; - callback(); - }, - - /** - * @api private - * @param callback - */ - coalesceRefresh: function coalesceRefresh(callback, sync) { - var self = this; - if (self.refreshCallbacks.push(callback) === 1) { - self.load(function onLoad(err) { - AWS.util.arrayEach(self.refreshCallbacks, function(callback) { - if (sync) { - callback(err); - } else { - // callback could throw, so defer to ensure all callbacks are notified - AWS.util.defer(function () { - callback(err); + async handle(request, { abortSignal } = {}) { + if (!this.config) { + this.config = await this.configProvider; + this.connectionManager.setDisableConcurrentStreams(this.config.disableConcurrentStreams || false); + if (this.config.maxConcurrentStreams) { + this.connectionManager.setMaxConcurrentStreams(this.config.maxConcurrentStreams); + } + } + const { requestTimeout, disableConcurrentStreams } = this.config; + return new Promise((_resolve, _reject) => { + var _a, _b, _c; + let fulfilled = false; + let writeRequestBodyPromise = undefined; + const resolve = async (arg) => { + await writeRequestBodyPromise; + _resolve(arg); + }; + const reject = async (arg) => { + await writeRequestBodyPromise; + _reject(arg); + }; + if (abortSignal === null || abortSignal === void 0 ? void 0 : abortSignal.aborted) { + fulfilled = true; + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + reject(abortError); + return; + } + const { hostname, method, port, protocol, query } = request; + let auth = ""; + if (request.username != null || request.password != null) { + const username = (_a = request.username) !== null && _a !== void 0 ? _a : ""; + const password = (_b = request.password) !== null && _b !== void 0 ? _b : ""; + auth = `${username}:${password}@`; + } + const authority = `${protocol}//${auth}${hostname}${port ? `:${port}` : ""}`; + const requestContext = { destination: new URL(authority) }; + const session = this.connectionManager.lease(requestContext, { + requestTimeout: (_c = this.config) === null || _c === void 0 ? 
void 0 : _c.sessionTimeout, + disableConcurrentStreams: disableConcurrentStreams || false, }); - } + const rejectWithDestroy = (err) => { + if (disableConcurrentStreams) { + this.destroySession(session); + } + fulfilled = true; + reject(err); + }; + const queryString = (0, querystring_builder_1.buildQueryString)(query || {}); + let path = request.path; + if (queryString) { + path += `?${queryString}`; + } + if (request.fragment) { + path += `#${request.fragment}`; + } + const req = session.request({ + ...request.headers, + [http2_1.constants.HTTP2_HEADER_PATH]: path, + [http2_1.constants.HTTP2_HEADER_METHOD]: method, + }); + session.ref(); + req.on("response", (headers) => { + const httpResponse = new protocol_http_1.HttpResponse({ + statusCode: headers[":status"] || -1, + headers: (0, get_transformed_headers_1.getTransformedHeaders)(headers), + body: req, + }); + fulfilled = true; + resolve({ response: httpResponse }); + if (disableConcurrentStreams) { + session.close(); + this.connectionManager.deleteSession(authority, session); + } + }); + if (requestTimeout) { + req.setTimeout(requestTimeout, () => { + req.close(); + const timeoutError = new Error(`Stream timed out because of no activity for ${requestTimeout} ms`); + timeoutError.name = "TimeoutError"; + rejectWithDestroy(timeoutError); + }); + } + if (abortSignal) { + abortSignal.onabort = () => { + req.close(); + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + rejectWithDestroy(abortError); + }; + } + req.on("frameError", (type, code, id) => { + rejectWithDestroy(new Error(`Frame type id ${type} in stream id ${id} has failed with code ${code}.`)); + }); + req.on("error", rejectWithDestroy); + req.on("aborted", () => { + rejectWithDestroy(new Error(`HTTP/2 stream is abnormally aborted in mid-communication with result code ${req.rstCode}.`)); + }); + req.on("close", () => { + session.unref(); + if (disableConcurrentStreams) { + session.destroy(); + } + if (!fulfilled) { + rejectWithDestroy(new Error("Unexpected error: http2 request did not get a response")); + } + }); + writeRequestBodyPromise = (0, write_request_body_1.writeRequestBody)(req, request, requestTimeout); }); - self.refreshCallbacks.length = 0; - }); } - }, - - /** - * @api private - * @param callback - */ - load: function load(callback) { - callback(); - } -}); + destroySession(session) { + if (!session.destroyed) { + session.destroy(); + } + } +} +exports.NodeHttp2Handler = NodeHttp2Handler; -/** - * @api private - */ -AWS.Credentials.addPromisesToClass = function addPromisesToClass(PromiseDependency) { - this.prototype.getPromise = AWS.util.promisifyMethod('get', PromiseDependency); - this.prototype.refreshPromise = AWS.util.promisifyMethod('refresh', PromiseDependency); -}; -/** - * @api private - */ -AWS.Credentials.deletePromisesFromClass = function deletePromisesFromClass() { - delete this.prototype.getPromise; - delete this.prototype.refreshPromise; -}; +/***/ }), -AWS.util.addPromises(AWS.Credentials); +/***/ 25545: +/***/ ((__unused_webpack_module, exports) => { +"use strict"; -/***/ }), +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.setConnectionTimeout = void 0; +const setConnectionTimeout = (request, reject, timeoutInMs = 0) => { + if (!timeoutInMs) { + return; + } + const timeoutId = setTimeout(() => { + request.destroy(); + reject(Object.assign(new Error(`Socket timed out without establishing a connection within ${timeoutInMs} ms`), { + name: "TimeoutError", + })); + }, timeoutInMs); + 
request.on("socket", (socket) => { + if (socket.connecting) { + socket.on("connect", () => { + clearTimeout(timeoutId); + }); + } + else { + clearTimeout(timeoutId); + } + }); +}; +exports.setConnectionTimeout = setConnectionTimeout; -/***/ 57083: -/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { -var AWS = __nccwpck_require__(28437); -var STS = __nccwpck_require__(57513); +/***/ }), -/** - * Represents temporary credentials retrieved from {AWS.STS}. Without any - * extra parameters, credentials will be fetched from the - * {AWS.STS.getSessionToken} operation. If an IAM role is provided, the - * {AWS.STS.assumeRole} operation will be used to fetch credentials for the - * role instead. - * - * AWS.ChainableTemporaryCredentials differs from AWS.TemporaryCredentials in - * the way masterCredentials and refreshes are handled. - * AWS.ChainableTemporaryCredentials refreshes expired credentials using the - * masterCredentials passed by the user to support chaining of STS credentials. - * However, AWS.TemporaryCredentials recursively collapses the masterCredentials - * during instantiation, precluding the ability to refresh credentials which - * require intermediate, temporary credentials. - * - * For example, if the application should use RoleA, which must be assumed from - * RoleB, and the environment provides credentials which can assume RoleB, then - * AWS.ChainableTemporaryCredentials must be used to support refreshing the - * temporary credentials for RoleA: - * - * ```javascript - * var roleACreds = new AWS.ChainableTemporaryCredentials({ - * params: {RoleArn: 'RoleA'}, - * masterCredentials: new AWS.ChainableTemporaryCredentials({ - * params: {RoleArn: 'RoleB'}, - * masterCredentials: new AWS.EnvironmentCredentials('AWS') - * }) - * }); - * ``` - * - * If AWS.TemporaryCredentials had been used in the previous example, - * `roleACreds` would fail to refresh because `roleACreds` would - * use the environment credentials for the AssumeRole request. - * - * Another difference is that AWS.ChainableTemporaryCredentials creates the STS - * service instance during instantiation while AWS.TemporaryCredentials creates - * the STS service instance during the first refresh. Creating the service - * instance during instantiation effectively captures the master credentials - * from the global config, so that subsequent changes to the global config do - * not affect the master credentials used to refresh the temporary credentials. - * - * This allows an instance of AWS.ChainableTemporaryCredentials to be assigned - * to AWS.config.credentials: - * - * ```javascript - * var envCreds = new AWS.EnvironmentCredentials('AWS'); - * AWS.config.credentials = envCreds; - * // masterCredentials will be envCreds - * AWS.config.credentials = new AWS.ChainableTemporaryCredentials({ - * params: {RoleArn: '...'} - * }); - * ``` - * - * Similarly, to use the CredentialProviderChain's default providers as the - * master credentials, simply create a new instance of - * AWS.ChainableTemporaryCredentials: - * - * ```javascript - * AWS.config.credentials = new ChainableTemporaryCredentials({ - * params: {RoleArn: '...'} - * }); - * ``` - * - * @!attribute service - * @return [AWS.STS] the STS service instance used to - * get and refresh temporary credentials from AWS STS. - * @note (see constructor) - */ -AWS.ChainableTemporaryCredentials = AWS.util.inherit(AWS.Credentials, { - /** - * Creates a new temporary credentials object. 
- * - * @param options [map] a set of options - * @option options params [map] ({}) a map of options that are passed to the - * {AWS.STS.assumeRole} or {AWS.STS.getSessionToken} operations. - * If a `RoleArn` parameter is passed in, credentials will be based on the - * IAM role. If a `SerialNumber` parameter is passed in, {tokenCodeFn} must - * also be passed in or an error will be thrown. - * @option options masterCredentials [AWS.Credentials] the master credentials - * used to get and refresh temporary credentials from AWS STS. By default, - * AWS.config.credentials or AWS.config.credentialProvider will be used. - * @option options tokenCodeFn [Function] (null) Function to provide - * `TokenCode`, if `SerialNumber` is provided for profile in {params}. Function - * is called with value of `SerialNumber` and `callback`, and should provide - * the `TokenCode` or an error to the callback in the format - * `callback(err, token)`. - * @example Creating a new credentials object for generic temporary credentials - * AWS.config.credentials = new AWS.ChainableTemporaryCredentials(); - * @example Creating a new credentials object for an IAM role - * AWS.config.credentials = new AWS.ChainableTemporaryCredentials({ - * params: { - * RoleArn: 'arn:aws:iam::1234567890:role/TemporaryCredentials' - * } - * }); - * @see AWS.STS.assumeRole - * @see AWS.STS.getSessionToken - */ - constructor: function ChainableTemporaryCredentials(options) { - AWS.Credentials.call(this); - options = options || {}; - this.errorCode = 'ChainableTemporaryCredentialsProviderFailure'; - this.expired = true; - this.tokenCodeFn = null; - - var params = AWS.util.copy(options.params) || {}; - if (params.RoleArn) { - params.RoleSessionName = params.RoleSessionName || 'temporary-credentials'; - } - if (params.SerialNumber) { - if (!options.tokenCodeFn || (typeof options.tokenCodeFn !== 'function')) { - throw new AWS.util.error( - new Error('tokenCodeFn must be a function when params.SerialNumber is given'), - {code: this.errorCode} - ); - } else { - this.tokenCodeFn = options.tokenCodeFn; - } - } - var config = AWS.util.merge( - { - params: params, - credentials: options.masterCredentials || AWS.config.credentials - }, - options.stsConfig || {} - ); - this.service = new STS(config); - }, +/***/ 83751: +/***/ ((__unused_webpack_module, exports) => { - /** - * Refreshes credentials using {AWS.STS.assumeRole} or - * {AWS.STS.getSessionToken}, depending on whether an IAM role ARN was passed - * to the credentials {constructor}. - * - * @callback callback function(err) - * Called when the STS service responds (or fails). When - * this callback is called with no error, it means that the credentials - * information has been loaded into the object (as the `accessKeyId`, - * `secretAccessKey`, and `sessionToken` properties). - * @param err [Error] if an error occurred, this value will be filled - * @see AWS.Credentials.get - */ - refresh: function refresh(callback) { - this.coalesceRefresh(callback || AWS.util.fn.callback); - }, +"use strict"; - /** - * @api private - * @param callback - */ - load: function load(callback) { - var self = this; - var operation = self.service.config.params.RoleArn ? 
'assumeRole' : 'getSessionToken'; - this.getTokenCode(function (err, tokenCode) { - var params = {}; - if (err) { - callback(err); +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.setSocketKeepAlive = void 0; +const setSocketKeepAlive = (request, { keepAlive, keepAliveMsecs }) => { + if (keepAlive !== true) { return; - } - if (tokenCode) { - params.TokenCode = tokenCode; - } - self.service[operation](params, function (err, data) { - if (!err) { - self.service.credentialsFrom(data, self); - } - callback(err); - }); - }); - }, - - /** - * @api private - */ - getTokenCode: function getTokenCode(callback) { - var self = this; - if (this.tokenCodeFn) { - this.tokenCodeFn(this.service.config.params.SerialNumber, function (err, token) { - if (err) { - var message = err; - if (err instanceof Error) { - message = err.message; - } - callback( - AWS.util.error( - new Error('Error fetching MFA token: ' + message), - { code: self.errorCode} - ) - ); - return; - } - callback(null, token); - }); - } else { - callback(null); } - } -}); + request.on("socket", (socket) => { + socket.setKeepAlive(keepAlive, keepAliveMsecs || 0); + }); +}; +exports.setSocketKeepAlive = setSocketKeepAlive; /***/ }), -/***/ 3498: -/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { - -var AWS = __nccwpck_require__(28437); -var CognitoIdentity = __nccwpck_require__(58291); -var STS = __nccwpck_require__(57513); +/***/ 42618: +/***/ ((__unused_webpack_module, exports) => { -/** - * Represents credentials retrieved from STS Web Identity Federation using - * the Amazon Cognito Identity service. - * - * By default this provider gets credentials using the - * {AWS.CognitoIdentity.getCredentialsForIdentity} service operation, which - * requires either an `IdentityId` or an `IdentityPoolId` (Amazon Cognito - * Identity Pool ID), which is used to call {AWS.CognitoIdentity.getId} to - * obtain an `IdentityId`. If the identity or identity pool is not configured in - * the Amazon Cognito Console to use IAM roles with the appropriate permissions, - * then additionally a `RoleArn` is required containing the ARN of the IAM trust - * policy for the Amazon Cognito role that the user will log into. If a `RoleArn` - * is provided, then this provider gets credentials using the - * {AWS.STS.assumeRoleWithWebIdentity} service operation, after first getting an - * Open ID token from {AWS.CognitoIdentity.getOpenIdToken}. - * - * In addition, if this credential provider is used to provide authenticated - * login, the `Logins` map may be set to the tokens provided by the respective - * identity providers. See {constructor} for an example on creating a credentials - * object with proper property values. - * - * ## Refreshing Credentials from Identity Service - * - * In addition to AWS credentials expiring after a given amount of time, the - * login token from the identity provider will also expire. Once this token - * expires, it will not be usable to refresh AWS credentials, and another - * token will be needed. The SDK does not manage refreshing of the token value, - * but this can be done through a "refresh token" supported by most identity - * providers. Consult the documentation for the identity provider for refreshing - * tokens. Once the refreshed token is acquired, you should make sure to update - * this new token in the credentials object's {params} property. 
The following - * code will update the WebIdentityToken, assuming you have retrieved an updated - * token from the identity provider: - * - * ```javascript - * AWS.config.credentials.params.Logins['graph.facebook.com'] = updatedToken; - * ``` - * - * Future calls to `credentials.refresh()` will now use the new token. - * - * @!attribute params - * @return [map] the map of params passed to - * {AWS.CognitoIdentity.getId}, - * {AWS.CognitoIdentity.getOpenIdToken}, and - * {AWS.STS.assumeRoleWithWebIdentity}. To update the token, set the - * `params.WebIdentityToken` property. - * @!attribute data - * @return [map] the raw data response from the call to - * {AWS.CognitoIdentity.getCredentialsForIdentity}, or - * {AWS.STS.assumeRoleWithWebIdentity}. Use this if you want to get - * access to other properties from the response. - * @!attribute identityId - * @return [String] the Cognito ID returned by the last call to - * {AWS.CognitoIdentity.getOpenIdToken}. This ID represents the actual - * final resolved identity ID from Amazon Cognito. - */ -AWS.CognitoIdentityCredentials = AWS.util.inherit(AWS.Credentials, { - /** - * @api private - */ - localStorageKey: { - id: 'aws.cognito.identity-id.', - providers: 'aws.cognito.identity-providers.' - }, +"use strict"; - /** - * Creates a new credentials object. - * @example Creating a new credentials object - * AWS.config.credentials = new AWS.CognitoIdentityCredentials({ - * - * // either IdentityPoolId or IdentityId is required - * // See the IdentityPoolId param for AWS.CognitoIdentity.getID (linked below) - * // See the IdentityId param for AWS.CognitoIdentity.getCredentialsForIdentity - * // or AWS.CognitoIdentity.getOpenIdToken (linked below) - * IdentityPoolId: 'us-east-1:1699ebc0-7900-4099-b910-2df94f52a030', - * IdentityId: 'us-east-1:128d0a74-c82f-4553-916d-90053e4a8b0f' - * - * // optional, only necessary when the identity pool is not configured - * // to use IAM roles in the Amazon Cognito Console - * // See the RoleArn param for AWS.STS.assumeRoleWithWebIdentity (linked below) - * RoleArn: 'arn:aws:iam::1234567890:role/MYAPP-CognitoIdentity', - * - * // optional tokens, used for authenticated login - * // See the Logins param for AWS.CognitoIdentity.getID (linked below) - * Logins: { - * 'graph.facebook.com': 'FBTOKEN', - * 'www.amazon.com': 'AMAZONTOKEN', - * 'accounts.google.com': 'GOOGLETOKEN', - * 'api.twitter.com': 'TWITTERTOKEN', - * 'www.digits.com': 'DIGITSTOKEN' - * }, - * - * // optional name, defaults to web-identity - * // See the RoleSessionName param for AWS.STS.assumeRoleWithWebIdentity (linked below) - * RoleSessionName: 'web', - * - * // optional, only necessary when application runs in a browser - * // and multiple users are signed in at once, used for caching - * LoginId: 'example@gmail.com' - * - * }, { - * // optionally provide configuration to apply to the underlying service clients - * // if configuration is not provided, then configuration will be pulled from AWS.config - * - * // region should match the region your identity pool is located in - * region: 'us-east-1', - * - * // specify timeout options - * httpOptions: { - * timeout: 100 - * } - * }); - * @see AWS.CognitoIdentity.getId - * @see AWS.CognitoIdentity.getCredentialsForIdentity - * @see AWS.STS.assumeRoleWithWebIdentity - * @see AWS.CognitoIdentity.getOpenIdToken - * @see AWS.Config - * @note If a region is not provided in the global AWS.config, or - * specified in the `clientConfig` to the CognitoIdentityCredentials - * constructor, you may encounter a 
'Missing credentials in config' error - * when calling making a service call. - */ - constructor: function CognitoIdentityCredentials(params, clientConfig) { - AWS.Credentials.call(this); - this.expired = true; - this.params = params; - this.data = null; - this._identityId = null; - this._clientConfig = AWS.util.copy(clientConfig || {}); - this.loadCachedId(); - var self = this; - Object.defineProperty(this, 'identityId', { - get: function() { - self.loadCachedId(); - return self._identityId || self.params.IdentityId; - }, - set: function(identityId) { - self._identityId = identityId; - } +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.setSocketTimeout = void 0; +const setSocketTimeout = (request, reject, timeoutInMs = 0) => { + request.setTimeout(timeoutInMs, () => { + request.destroy(); + reject(Object.assign(new Error(`Connection timed out after ${timeoutInMs} ms`), { name: "TimeoutError" })); }); - }, +}; +exports.setSocketTimeout = setSocketTimeout; - /** - * Refreshes credentials using {AWS.CognitoIdentity.getCredentialsForIdentity}, - * or {AWS.STS.assumeRoleWithWebIdentity}. - * - * @callback callback function(err) - * Called when the STS service responds (or fails). When - * this callback is called with no error, it means that the credentials - * information has been loaded into the object (as the `accessKeyId`, - * `secretAccessKey`, and `sessionToken` properties). - * @param err [Error] if an error occurred, this value will be filled - * @see AWS.Credentials.get - */ - refresh: function refresh(callback) { - this.coalesceRefresh(callback || AWS.util.fn.callback); - }, - /** - * @api private - * @param callback - */ - load: function load(callback) { - var self = this; - self.createClients(); - self.data = null; - self._identityId = null; - self.getId(function(err) { - if (!err) { - if (!self.params.RoleArn) { - self.getCredentialsForIdentity(callback); - } else { - self.getCredentialsFromSTS(callback); - } - } else { - self.clearIdOnNotAuthorized(err); - callback(err); - } - }); - }, +/***/ }), - /** - * Clears the cached Cognito ID associated with the currently configured - * identity pool ID. Use this to manually invalidate your cache if - * the identity pool ID was deleted. - */ - clearCachedId: function clearCache() { - this._identityId = null; - delete this.params.IdentityId; - - var poolId = this.params.IdentityPoolId; - var loginId = this.params.LoginId || ''; - delete this.storage[this.localStorageKey.id + poolId + loginId]; - delete this.storage[this.localStorageKey.providers + poolId + loginId]; - }, +/***/ 23211: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; - /** - * @api private - */ - clearIdOnNotAuthorized: function clearIdOnNotAuthorized(err) { - var self = this; - if (err.code == 'NotAuthorizedException') { - self.clearCachedId(); +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Collector = void 0; +const stream_1 = __nccwpck_require__(12781); +class Collector extends stream_1.Writable { + constructor() { + super(...arguments); + this.bufferedBytes = []; } - }, + _write(chunk, encoding, callback) { + this.bufferedBytes.push(chunk); + callback(); + } +} +exports.Collector = Collector; - /** - * Retrieves a Cognito ID, loading from cache if it was already retrieved - * on this device. - * - * @callback callback function(err, identityId) - * @param err [Error, null] an error object if the call failed or null if - * it succeeded. 
- * @param identityId [String, null] if successful, the callback will return - * the Cognito ID. - * @note If not loaded explicitly, the Cognito ID is loaded and stored in - * localStorage in the browser environment of a device. - * @api private - */ - getId: function getId(callback) { - var self = this; - if (typeof self.params.IdentityId === 'string') { - return callback(null, self.params.IdentityId); - } - - self.cognito.getId(function(err, data) { - if (!err && data.IdentityId) { - self.params.IdentityId = data.IdentityId; - callback(null, data.IdentityId); - } else { - callback(err); - } - }); - }, +/***/ }), - /** - * @api private - */ - loadCredentials: function loadCredentials(data, credentials) { - if (!data || !credentials) return; - credentials.expired = false; - credentials.accessKeyId = data.Credentials.AccessKeyId; - credentials.secretAccessKey = data.Credentials.SecretKey; - credentials.sessionToken = data.Credentials.SessionToken; - credentials.expireTime = data.Credentials.Expiration; - }, +/***/ 81030: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - /** - * @api private - */ - getCredentialsForIdentity: function getCredentialsForIdentity(callback) { - var self = this; - self.cognito.getCredentialsForIdentity(function(err, data) { - if (!err) { - self.cacheId(data); - self.data = data; - self.loadCredentials(self.data, self); - } else { - self.clearIdOnNotAuthorized(err); - } - callback(err); - }); - }, +"use strict"; - /** - * @api private - */ - getCredentialsFromSTS: function getCredentialsFromSTS(callback) { - var self = this; - self.cognito.getOpenIdToken(function(err, data) { - if (!err) { - self.cacheId(data); - self.params.WebIdentityToken = data.Token; - self.webIdentityCredentials.refresh(function(webErr) { - if (!webErr) { - self.data = self.webIdentityCredentials.data; - self.sts.credentialsFrom(self.data, self); - } - callback(webErr); - }); - } else { - self.clearIdOnNotAuthorized(err); - callback(err); - } +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.streamCollector = void 0; +const collector_1 = __nccwpck_require__(23211); +const streamCollector = (stream) => new Promise((resolve, reject) => { + const collector = new collector_1.Collector(); + stream.pipe(collector); + stream.on("error", (err) => { + collector.end(); + reject(err); }); - }, + collector.on("error", reject); + collector.on("finish", function () { + const bytes = new Uint8Array(Buffer.concat(this.bufferedBytes)); + resolve(bytes); + }); +}); +exports.streamCollector = streamCollector; - /** - * @api private - */ - loadCachedId: function loadCachedId() { - var self = this; - - // in the browser we source default IdentityId from localStorage - if (AWS.util.isBrowser() && !self.params.IdentityId) { - var id = self.getStorage('id'); - if (id && self.params.Logins) { - var actualProviders = Object.keys(self.params.Logins); - var cachedProviders = - (self.getStorage('providers') || '').split(','); - - // only load ID if at least one provider used this ID before - var intersect = cachedProviders.filter(function(n) { - return actualProviders.indexOf(n) !== -1; - }); - if (intersect.length !== 0) { - self.params.IdentityId = id; - } - } else if (id) { - self.params.IdentityId = id; - } - } - }, - /** - * @api private - */ - createClients: function() { - var clientConfig = this._clientConfig; - this.webIdentityCredentials = this.webIdentityCredentials || - new AWS.WebIdentityCredentials(this.params, clientConfig); - if (!this.cognito) { - var 
cognitoConfig = AWS.util.merge({}, clientConfig); - cognitoConfig.params = this.params; - this.cognito = new CognitoIdentity(cognitoConfig); - } - this.sts = this.sts || new STS(clientConfig); - }, +/***/ }), - /** - * @api private - */ - cacheId: function cacheId(data) { - this._identityId = data.IdentityId; - this.params.IdentityId = this._identityId; +/***/ 73766: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - // cache this IdentityId in browser localStorage if possible - if (AWS.util.isBrowser()) { - this.setStorage('id', data.IdentityId); +"use strict"; - if (this.params.Logins) { - this.setStorage('providers', Object.keys(this.params.Logins).join(',')); - } +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.writeRequestBody = void 0; +const stream_1 = __nccwpck_require__(12781); +const MIN_WAIT_TIME = 1000; +async function writeRequestBody(httpRequest, request, maxContinueTimeoutMs = MIN_WAIT_TIME) { + var _a; + const headers = (_a = request.headers) !== null && _a !== void 0 ? _a : {}; + const expect = headers["Expect"] || headers["expect"]; + let timeoutId = -1; + let hasError = false; + if (expect === "100-continue") { + await Promise.race([ + new Promise((resolve) => { + timeoutId = Number(setTimeout(resolve, Math.max(MIN_WAIT_TIME, maxContinueTimeoutMs))); + }), + new Promise((resolve) => { + httpRequest.on("continue", () => { + clearTimeout(timeoutId); + resolve(); + }); + httpRequest.on("error", () => { + hasError = true; + clearTimeout(timeoutId); + resolve(); + }); + }), + ]); } - }, + if (!hasError) { + writeBody(httpRequest, request.body); + } +} +exports.writeRequestBody = writeRequestBody; +function writeBody(httpRequest, body) { + if (body instanceof stream_1.Readable) { + body.pipe(httpRequest); + } + else if (body) { + httpRequest.end(Buffer.from(body)); + } + else { + httpRequest.end(); + } +} - /** - * @api private - */ - getStorage: function getStorage(key) { - return this.storage[this.localStorageKey[key] + this.params.IdentityPoolId + (this.params.LoginId || '')]; - }, - /** - * @api private - */ - setStorage: function setStorage(key, val) { - try { - this.storage[this.localStorageKey[key] + this.params.IdentityPoolId + (this.params.LoginId || '')] = val; - } catch (_) {} - }, +/***/ }), - /** - * @api private - */ - storage: (function() { - try { - var storage = AWS.util.isBrowser() && window.localStorage !== null && typeof window.localStorage === 'object' ? 
- window.localStorage : {}; +/***/ 63936: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - // Test set/remove which would throw an error in Safari's private browsing - storage['aws.test-storage'] = 'foobar'; - delete storage['aws.test-storage']; +"use strict"; - return storage; - } catch (_) { - return {}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.CredentialsProviderError = void 0; +const ProviderError_1 = __nccwpck_require__(23324); +class CredentialsProviderError extends ProviderError_1.ProviderError { + constructor(message, tryNextLink = true) { + super(message, tryNextLink); + this.tryNextLink = tryNextLink; + this.name = "CredentialsProviderError"; + Object.setPrototypeOf(this, CredentialsProviderError.prototype); } - })() -}); +} +exports.CredentialsProviderError = CredentialsProviderError; /***/ }), -/***/ 36965: -/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 23324: +/***/ ((__unused_webpack_module, exports) => { -var AWS = __nccwpck_require__(28437); +"use strict"; -/** - * Creates a credential provider chain that searches for AWS credentials - * in a list of credential providers specified by the {providers} property. - * - * By default, the chain will use the {defaultProviders} to resolve credentials. - * These providers will look in the environment using the - * {AWS.EnvironmentCredentials} class with the 'AWS' and 'AMAZON' prefixes. - * - * ## Setting Providers - * - * Each provider in the {providers} list should be a function that returns - * a {AWS.Credentials} object, or a hardcoded credentials object. The function - * form allows for delayed execution of the credential construction. - * - * ## Resolving Credentials from a Chain - * - * Call {resolve} to return the first valid credential object that can be - * loaded by the provider chain. - * - * For example, to resolve a chain with a custom provider that checks a file - * on disk after the set of {defaultProviders}: - * - * ```javascript - * var diskProvider = new AWS.FileSystemCredentials('./creds.json'); - * var chain = new AWS.CredentialProviderChain(); - * chain.providers.push(diskProvider); - * chain.resolve(); - * ``` - * - * The above code will return the `diskProvider` object if the - * file contains credentials and the `defaultProviders` do not contain - * any credential settings. - * - * @!attribute providers - * @return [Array] - * a list of credentials objects or functions that return credentials - * objects. If the provider is a function, the function will be - * executed lazily when the provider needs to be checked for valid - * credentials. By default, this object will be set to the - * {defaultProviders}. - * @see defaultProviders - */ -AWS.CredentialProviderChain = AWS.util.inherit(AWS.Credentials, { - - /** - * Creates a new CredentialProviderChain with a default set of providers - * specified by {defaultProviders}. 
- */ - constructor: function CredentialProviderChain(providers) { - if (providers) { - this.providers = providers; - } else { - this.providers = AWS.CredentialProviderChain.defaultProviders.slice(0); +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ProviderError = void 0; +class ProviderError extends Error { + constructor(message, tryNextLink = true) { + super(message); + this.tryNextLink = tryNextLink; + this.name = "ProviderError"; + Object.setPrototypeOf(this, ProviderError.prototype); } - this.resolveCallbacks = []; - }, + static from(error, tryNextLink = true) { + return Object.assign(new this(error.message, tryNextLink), error); + } +} +exports.ProviderError = ProviderError; - /** - * @!method resolvePromise() - * Returns a 'thenable' promise. - * Resolves the provider chain by searching for the first set of - * credentials in {providers}. - * - * Two callbacks can be provided to the `then` method on the returned promise. - * The first callback will be called if the promise is fulfilled, and the second - * callback will be called if the promise is rejected. - * @callback fulfilledCallback function(credentials) - * Called if the promise is fulfilled and the provider resolves the chain - * to a credentials object - * @param credentials [AWS.Credentials] the credentials object resolved - * by the provider chain. - * @callback rejectedCallback function(error) - * Called if the promise is rejected. - * @param err [Error] the error object returned if no credentials are found. - * @return [Promise] A promise that represents the state of the `resolve` method call. - * @example Calling the `resolvePromise` method. - * var promise = chain.resolvePromise(); - * promise.then(function(credentials) { ... }, function(err) { ... }); - */ - - /** - * Resolves the provider chain by searching for the first set of - * credentials in {providers}. - * - * @callback callback function(err, credentials) - * Called when the provider resolves the chain to a credentials object - * or null if no credentials can be found. - * - * @param err [Error] the error object returned if no credentials are - * found. - * @param credentials [AWS.Credentials] the credentials object resolved - * by the provider chain. - * @return [AWS.CredentialProviderChain] the provider, for chaining. - */ - resolve: function resolve(callback) { - var self = this; - if (self.providers.length === 0) { - callback(new Error('No providers')); - return self; - } - - if (self.resolveCallbacks.push(callback) === 1) { - var index = 0; - var providers = self.providers.slice(0); - - function resolveNext(err, creds) { - if ((!err && creds) || index === providers.length) { - AWS.util.arrayEach(self.resolveCallbacks, function (callback) { - callback(err, creds); - }); - self.resolveCallbacks.length = 0; - return; - } - - var provider = providers[index++]; - if (typeof provider === 'function') { - creds = provider.call(); - } else { - creds = provider; - } - if (creds.get) { - creds.get(function (getErr) { - resolveNext(getErr, getErr ? 
null : creds); - }); - } else { - resolveNext(null, creds); - } - } +/***/ }), + +/***/ 50429: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; - resolveNext(); +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.TokenProviderError = void 0; +const ProviderError_1 = __nccwpck_require__(23324); +class TokenProviderError extends ProviderError_1.ProviderError { + constructor(message, tryNextLink = true) { + super(message, tryNextLink); + this.tryNextLink = tryNextLink; + this.name = "TokenProviderError"; + Object.setPrototypeOf(this, TokenProviderError.prototype); } +} +exports.TokenProviderError = TokenProviderError; - return self; - } -}); -/** - * The default set of providers used by a vanilla CredentialProviderChain. - * - * In the browser: - * - * ```javascript - * AWS.CredentialProviderChain.defaultProviders = [] - * ``` - * - * In Node.js: - * - * ```javascript - * AWS.CredentialProviderChain.defaultProviders = [ - * function () { return new AWS.EnvironmentCredentials('AWS'); }, - * function () { return new AWS.EnvironmentCredentials('AMAZON'); }, - * function () { return new AWS.SsoCredentials(); }, - * function () { return new AWS.SharedIniFileCredentials(); }, - * function () { return new AWS.ECSCredentials(); }, - * function () { return new AWS.ProcessCredentials(); }, - * function () { return new AWS.TokenFileWebIdentityCredentials(); }, - * function () { return new AWS.EC2MetadataCredentials() } - * ] - * ``` - */ -AWS.CredentialProviderChain.defaultProviders = []; +/***/ }), -/** - * @api private - */ -AWS.CredentialProviderChain.addPromisesToClass = function addPromisesToClass(PromiseDependency) { - this.prototype.resolvePromise = AWS.util.promisifyMethod('resolve', PromiseDependency); -}; +/***/ 45079: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -/** - * @api private - */ -AWS.CredentialProviderChain.deletePromisesFromClass = function deletePromisesFromClass() { - delete this.prototype.resolvePromise; -}; +"use strict"; -AWS.util.addPromises(AWS.CredentialProviderChain); +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.chain = void 0; +const ProviderError_1 = __nccwpck_require__(23324); +function chain(...providers) { + return () => { + let promise = Promise.reject(new ProviderError_1.ProviderError("No providers in chain")); + for (const provider of providers) { + promise = promise.catch((err) => { + if (err === null || err === void 0 ? void 0 : err.tryNextLink) { + return provider(); + } + throw err; + }); + } + return promise; + }; +} +exports.chain = chain; /***/ }), -/***/ 73379: -/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 51322: +/***/ ((__unused_webpack_module, exports) => { -var AWS = __nccwpck_require__(28437); -__nccwpck_require__(25768); +"use strict"; -/** - * Represents credentials received from the metadata service on an EC2 instance. - * - * By default, this class will connect to the metadata service using - * {AWS.MetadataService} and attempt to load any available credentials. If it - * can connect, and credentials are available, these will be used with zero - * configuration. - * - * This credentials class will by default timeout after 1 second of inactivity - * and retry 3 times. 
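For reference, the provider utilities this patch bundles (ProviderError, chain, fromStatic, memoize) compose as in the sketch below. The require path is an assumption, since inside this bundle the helpers are only reachable through webpack module ids, and the environment-variable fallback is purely illustrative.

```javascript
// Usage sketch only: composing the bundled provider helpers.
// Assumption: the helpers are imported from their published package;
// the package name below is not confirmed by this patch.
const { ProviderError, chain, fromStatic, memoize } = require("@smithy/property-provider");

// A provider is a zero-argument function returning a promise.
// chain() tries each provider in order and falls through only while the
// rejection carries tryNextLink === true (the ProviderError default).
const fromEnv = () =>
  process.env.AWS_ACCESS_KEY_ID
    ? Promise.resolve({
        accessKeyId: process.env.AWS_ACCESS_KEY_ID,
        secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
      })
    : Promise.reject(new ProviderError("No credentials in environment"));

// memoize() caches the resolved value, coalesces concurrent calls, and,
// given an isExpired predicate, re-resolves once the value expires.
const provider = memoize(
  chain(fromEnv, fromStatic({ accessKeyId: "AKID_PLACEHOLDER", secretAccessKey: "secret" })),
  (creds) => creds.expiration !== undefined && creds.expiration.getTime() < Date.now(),
  (creds) => creds.expiration !== undefined
);

provider().then((creds) => console.log(creds.accessKeyId));
```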
- * If your requests to the EC2 metadata service are timing out, you can increase - * these values by configuring them directly: - * - * ```javascript - * AWS.config.credentials = new AWS.EC2MetadataCredentials({ - * httpOptions: { timeout: 5000 }, // 5 second timeout - * maxRetries: 10, // retry 10 times - * retryDelayOptions: { base: 200 }, // see AWS.Config for information - * logger: console // see AWS.Config for information - * }); - * ``` - * - * If your requests are timing out in connecting to the metadata service, such - * as when testing on a development machine, you can use the connectTimeout - * option, specified in milliseconds, which also defaults to 1 second. - * - * If the requests failed or returns expired credentials, it will - * extend the expiration of current credential, with a warning message. For more - * information, please go to: - * https://docs.aws.amazon.com/sdkref/latest/guide/feature-static-credentials.html - * - * @!attribute originalExpiration - * @return [Date] The optional original expiration of the current credential. - * In case of AWS outage, the EC2 metadata will extend expiration of the - * existing credential. - * - * @see AWS.Config.retryDelayOptions - * @see AWS.Config.logger - * - * @!macro nobrowser - */ -AWS.EC2MetadataCredentials = AWS.util.inherit(AWS.Credentials, { - constructor: function EC2MetadataCredentials(options) { - AWS.Credentials.call(this); - - options = options ? AWS.util.copy(options) : {}; - options = AWS.util.merge( - {maxRetries: this.defaultMaxRetries}, options); - if (!options.httpOptions) options.httpOptions = {}; - options.httpOptions = AWS.util.merge( - {timeout: this.defaultTimeout, - connectTimeout: this.defaultConnectTimeout}, - options.httpOptions); - - this.metadataService = new AWS.MetadataService(options); - this.logger = options.logger || AWS.config && AWS.config.logger; - }, +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.fromStatic = void 0; +const fromStatic = (staticValue) => () => Promise.resolve(staticValue); +exports.fromStatic = fromStatic; - /** - * @api private - */ - defaultTimeout: 1000, - - /** - * @api private - */ - defaultConnectTimeout: 1000, - - /** - * @api private - */ - defaultMaxRetries: 3, - - /** - * The original expiration of the current credential. In case of AWS - * outage, the EC2 metadata will extend expiration of the existing - * credential. - */ - originalExpiration: undefined, - - /** - * Loads the credentials from the instance metadata service - * - * @callback callback function(err) - * Called when the instance metadata service responds (or fails). When - * this callback is called with no error, it means that the credentials - * information has been loaded into the object (as the `accessKeyId`, - * `secretAccessKey`, and `sessionToken` properties). - * @param err [Error] if an error occurred, this value will be filled - * @see get - */ - refresh: function refresh(callback) { - this.coalesceRefresh(callback || AWS.util.fn.callback); - }, - /** - * @api private - * @param callback - */ - load: function load(callback) { - var self = this; - self.metadataService.loadCredentials(function(err, creds) { - if (err) { - if (self.hasLoadedCredentials()) { - self.extendExpirationIfExpired(); - callback(); - } else { - callback(err); - } - } else { - self.setCredentials(creds); - self.extendExpirationIfExpired(); - callback(); - } - }); - }, +/***/ }), - /** - * Whether this credential has been loaded. 
- * @api private - */ - hasLoadedCredentials: function hasLoadedCredentials() { - return this.AccessKeyId && this.secretAccessKey; - }, +/***/ 79721: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - /** - * if expired, extend the expiration by 15 minutes base plus a jitter of 5 - * minutes range. - * @api private - */ - extendExpirationIfExpired: function extendExpirationIfExpired() { - if (this.needsRefresh()) { - this.originalExpiration = this.originalExpiration || this.expireTime; - this.expired = false; - var nextTimeout = 15 * 60 + Math.floor(Math.random() * 5 * 60); - var currentTime = AWS.util.date.getDate().getTime(); - this.expireTime = new Date(currentTime + nextTimeout * 1000); - // TODO: add doc link; - this.logger.warn('Attempting credential expiration extension due to a ' - + 'credential service availability issue. A refresh of these ' - + 'credentials will be attempted again at ' + this.expireTime - + '\nFor more information, please visit: https://docs.aws.amazon.com/sdkref/latest/guide/feature-static-credentials.html'); - } - }, +"use strict"; - /** - * Update the credential with new credential responded from EC2 metadata - * service. - * @api private - */ - setCredentials: function setCredentials(creds) { - var currentTime = AWS.util.date.getDate().getTime(); - var expireTime = new Date(creds.Expiration); - this.expired = currentTime >= expireTime ? true : false; - this.metadata = creds; - this.accessKeyId = creds.AccessKeyId; - this.secretAccessKey = creds.SecretAccessKey; - this.sessionToken = creds.Token; - this.expireTime = expireTime; - } -}); +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(63936), exports); +tslib_1.__exportStar(__nccwpck_require__(23324), exports); +tslib_1.__exportStar(__nccwpck_require__(50429), exports); +tslib_1.__exportStar(__nccwpck_require__(45079), exports); +tslib_1.__exportStar(__nccwpck_require__(51322), exports); +tslib_1.__exportStar(__nccwpck_require__(49762), exports); /***/ }), -/***/ 10645: -/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 49762: +/***/ ((__unused_webpack_module, exports) => { -var AWS = __nccwpck_require__(28437); +"use strict"; -/** - * Represents credentials received from relative URI specified in the ECS container. - * - * This class will request refreshable credentials from the relative URI - * specified by the AWS_CONTAINER_CREDENTIALS_RELATIVE_URI or the - * AWS_CONTAINER_CREDENTIALS_FULL_URI environment variable. If valid credentials - * are returned in the response, these will be used with zero configuration. - * - * This credentials class will by default timeout after 1 second of inactivity - * and retry 3 times. 
- * If your requests to the relative URI are timing out, you can increase - * the value by configuring them directly: - * - * ```javascript - * AWS.config.credentials = new AWS.ECSCredentials({ - * httpOptions: { timeout: 5000 }, // 5 second timeout - * maxRetries: 10, // retry 10 times - * retryDelayOptions: { base: 200 } // see AWS.Config for information - * }); - * ``` - * - * @see AWS.Config.retryDelayOptions - * - * @!macro nobrowser - */ -AWS.ECSCredentials = AWS.RemoteCredentials; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.memoize = void 0; +const memoize = (provider, isExpired, requiresRefresh) => { + let resolved; + let pending; + let hasResult; + let isConstant = false; + const coalesceProvider = async () => { + if (!pending) { + pending = provider(); + } + try { + resolved = await pending; + hasResult = true; + isConstant = false; + } + finally { + pending = undefined; + } + return resolved; + }; + if (isExpired === undefined) { + return async (options) => { + if (!hasResult || (options === null || options === void 0 ? void 0 : options.forceRefresh)) { + resolved = await coalesceProvider(); + } + return resolved; + }; + } + return async (options) => { + if (!hasResult || (options === null || options === void 0 ? void 0 : options.forceRefresh)) { + resolved = await coalesceProvider(); + } + if (isConstant) { + return resolved; + } + if (requiresRefresh && !requiresRefresh(resolved)) { + isConstant = true; + return resolved; + } + if (isExpired(resolved)) { + await coalesceProvider(); + return resolved; + } + return resolved; + }; +}; +exports.memoize = memoize; /***/ }), -/***/ 57714: -/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { - -var AWS = __nccwpck_require__(28437); +/***/ 89179: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -/** - * Represents credentials from the environment. - * - * By default, this class will look for the matching environment variables - * prefixed by a given {envPrefix}. The un-prefixed environment variable names - * for each credential value is listed below: - * - * ```javascript - * accessKeyId: ACCESS_KEY_ID - * secretAccessKey: SECRET_ACCESS_KEY - * sessionToken: SESSION_TOKEN - * ``` - * - * With the default prefix of 'AWS', the environment variables would be: - * - * AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, AWS_SESSION_TOKEN - * - * @!attribute envPrefix - * @readonly - * @return [String] the prefix for the environment variable names excluding - * the separating underscore ('_'). - */ -AWS.EnvironmentCredentials = AWS.util.inherit(AWS.Credentials, { - - /** - * Creates a new EnvironmentCredentials class with a given variable - * prefix {envPrefix}. For example, to load credentials using the 'AWS' - * prefix: - * - * ```javascript - * var creds = new AWS.EnvironmentCredentials('AWS'); - * creds.accessKeyId == 'AKID' // from AWS_ACCESS_KEY_ID env var - * ``` - * - * @param envPrefix [String] the prefix to use (e.g., 'AWS') for environment - * variables. Do not include the separating underscore. - */ - constructor: function EnvironmentCredentials(envPrefix) { - AWS.Credentials.call(this); - this.envPrefix = envPrefix; - this.get(function() {}); - }, +"use strict"; - /** - * Loads credentials from the environment using the prefixed - * environment variables. - * - * @callback callback function(err) - * Called after the (prefixed) ACCESS_KEY_ID, SECRET_ACCESS_KEY, and - * SESSION_TOKEN environment variables are read. 
When this callback is - * called with no error, it means that the credentials information has - * been loaded into the object (as the `accessKeyId`, `secretAccessKey`, - * and `sessionToken` properties). - * @param err [Error] if an error occurred, this value will be filled - * @see get - */ - refresh: function refresh(callback) { - if (!callback) callback = AWS.util.fn.callback; - - if (!process || !process.env) { - callback(AWS.util.error( - new Error('No process info or environment variables available'), - { code: 'EnvironmentCredentialsProviderFailure' } - )); - return; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Field = void 0; +const types_1 = __nccwpck_require__(55756); +class Field { + constructor({ name, kind = types_1.FieldPosition.HEADER, values = [] }) { + this.name = name; + this.kind = kind; + this.values = values; } + add(value) { + this.values.push(value); + } + set(values) { + this.values = values; + } + remove(value) { + this.values = this.values.filter((v) => v !== value); + } + toString() { + return this.values.map((v) => (v.includes(",") || v.includes(" ") ? `"${v}"` : v)).join(", "); + } + get() { + return this.values; + } +} +exports.Field = Field; - var keys = ['ACCESS_KEY_ID', 'SECRET_ACCESS_KEY', 'SESSION_TOKEN']; - var values = []; - for (var i = 0; i < keys.length; i++) { - var prefix = ''; - if (this.envPrefix) prefix = this.envPrefix + '_'; - values[i] = process.env[prefix + keys[i]]; - if (!values[i] && keys[i] !== 'SESSION_TOKEN') { - callback(AWS.util.error( - new Error('Variable ' + prefix + keys[i] + ' not set.'), - { code: 'EnvironmentCredentialsProviderFailure' } - )); - return; - } - } +/***/ }), - this.expired = false; - AWS.Credentials.apply(this, values); - callback(); - } +/***/ 99242: +/***/ ((__unused_webpack_module, exports) => { -}); +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Fields = void 0; +class Fields { + constructor({ fields = [], encoding = "utf-8" }) { + this.entries = {}; + fields.forEach(this.setField.bind(this)); + this.encoding = encoding; + } + setField(field) { + this.entries[field.name.toLowerCase()] = field; + } + getField(name) { + return this.entries[name.toLowerCase()]; + } + removeField(name) { + delete this.entries[name.toLowerCase()]; + } + getByType(kind) { + return Object.values(this.entries).filter((field) => field.kind === kind); + } +} +exports.Fields = Fields; /***/ }), -/***/ 27454: -/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 63206: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; -var AWS = __nccwpck_require__(28437); +Object.defineProperty(exports, "__esModule", ({ value: true })); -/** - * Represents credentials from a JSON file on disk. - * If the credentials expire, the SDK can {refresh} the credentials - * from the file. - * - * The format of the file should be similar to the options passed to - * {AWS.Config}: - * - * ```javascript - * {accessKeyId: 'akid', secretAccessKey: 'secret', sessionToken: 'optional'} - * ``` - * - * @example Loading credentials from disk - * var creds = new AWS.FileSystemCredentials('./configuration.json'); - * creds.accessKeyId == 'AKID' - * - * @!attribute filename - * @readonly - * @return [String] the path to the JSON file on disk containing the - * credentials. 
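The patch also bundles the v3 HTTP primitives (Field, Fields, HttpRequest, HttpResponse). A minimal sketch of the normalization HttpRequest performs, assuming the class is consumed through its published package (commonly @smithy/protocol-http; the name is an assumption here) and using placeholder endpoint values:

```javascript
// Sketch only: the constructor normalizes protocol and path, and clone()
// copies headers and query so the copy can be mutated independently.
// The package name and the endpoint below are assumptions/placeholders.
const { HttpRequest } = require("@smithy/protocol-http");

const request = new HttpRequest({
  protocol: "https",                                   // stored as "https:"
  hostname: "sts.us-east-1.amazonaws.com",
  path: "assume-role",                                 // stored as "/assume-role"
  headers: { "content-type": "application/x-www-form-urlencoded" },
  query: { Action: ["AssumeRole"] },
});

const copy = request.clone();
copy.headers["x-example"] = "edited-copy-only";        // original headers untouched

console.log(HttpRequest.isInstance(copy));             // true
console.log(request.protocol, request.path);           // "https:" "/assume-role"
```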
- * @!macro nobrowser - */ -AWS.FileSystemCredentials = AWS.util.inherit(AWS.Credentials, { - - /** - * @overload AWS.FileSystemCredentials(filename) - * Creates a new FileSystemCredentials object from a filename - * - * @param filename [String] the path on disk to the JSON file to load. - */ - constructor: function FileSystemCredentials(filename) { - AWS.Credentials.call(this); - this.filename = filename; - this.get(function() {}); - }, - /** - * Loads the credentials from the {filename} on disk. - * - * @callback callback function(err) - * Called after the JSON file on disk is read and parsed. When this callback - * is called with no error, it means that the credentials information - * has been loaded into the object (as the `accessKeyId`, `secretAccessKey`, - * and `sessionToken` properties). - * @param err [Error] if an error occurred, this value will be filled - * @see get - */ - refresh: function refresh(callback) { - if (!callback) callback = AWS.util.fn.callback; - try { - var creds = JSON.parse(AWS.util.readFileSync(this.filename)); - AWS.Credentials.call(this, creds); - if (!this.accessKeyId || !this.secretAccessKey) { - throw AWS.util.error( - new Error('Credentials not set in ' + this.filename), - { code: 'FileSystemCredentialsProviderFailure' } - ); - } - this.expired = false; - callback(); - } catch (err) { - callback(err); - } - } +/***/ }), -}); +/***/ 38746: +/***/ ((__unused_webpack_module, exports) => { +"use strict"; -/***/ }), +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.HttpRequest = void 0; +class HttpRequest { + constructor(options) { + this.method = options.method || "GET"; + this.hostname = options.hostname || "localhost"; + this.port = options.port; + this.query = options.query || {}; + this.headers = options.headers || {}; + this.body = options.body; + this.protocol = options.protocol + ? options.protocol.slice(-1) !== ":" + ? `${options.protocol}:` + : options.protocol + : "https:"; + this.path = options.path ? (options.path.charAt(0) !== "/" ? `/${options.path}` : options.path) : "/"; + this.username = options.username; + this.password = options.password; + this.fragment = options.fragment; + } + static isInstance(request) { + if (!request) + return false; + const req = request; + return ("method" in req && + "protocol" in req && + "hostname" in req && + "path" in req && + typeof req["query"] === "object" && + typeof req["headers"] === "object"); + } + clone() { + const cloned = new HttpRequest({ + ...this, + headers: { ...this.headers }, + }); + if (cloned.query) + cloned.query = cloneQuery(cloned.query); + return cloned; + } +} +exports.HttpRequest = HttpRequest; +function cloneQuery(query) { + return Object.keys(query).reduce((carry, paramName) => { + const param = query[paramName]; + return { + ...carry, + [paramName]: Array.isArray(param) ? [...param] : param, + }; + }, {}); +} -/***/ 80371: -/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { -var AWS = __nccwpck_require__(28437); -var proc = __nccwpck_require__(32081); -var iniLoader = AWS.util.iniLoader; +/***/ }), -/** - * Represents credentials loaded from shared credentials file - * (defaulting to ~/.aws/credentials or defined by the - * `AWS_SHARED_CREDENTIALS_FILE` environment variable). 
- * - * ## Using process credentials - * - * The credentials file can specify a credential provider that executes - * a given process and attempts to read its stdout to recieve a JSON payload - * containing the credentials: - * - * [default] - * credential_process = /usr/bin/credential_proc - * - * Automatically handles refreshing credentials if an Expiration time is - * provided in the credentials payload. Credentials supplied in the same profile - * will take precedence over the credential_process. - * - * Sourcing credentials from an external process can potentially be dangerous, - * so proceed with caution. Other credential providers should be preferred if - * at all possible. If using this option, you should make sure that the shared - * credentials file is as locked down as possible using security best practices - * for your operating system. - * - * ## Using custom profiles - * - * The SDK supports loading credentials for separate profiles. This can be done - * in two ways: - * - * 1. Set the `AWS_PROFILE` environment variable in your process prior to - * loading the SDK. - * 2. Directly load the AWS.ProcessCredentials provider: - * - * ```javascript - * var creds = new AWS.ProcessCredentials({profile: 'myprofile'}); - * AWS.config.credentials = creds; - * ``` - * - * @!macro nobrowser - */ -AWS.ProcessCredentials = AWS.util.inherit(AWS.Credentials, { - /** - * Creates a new ProcessCredentials object. - * - * @param options [map] a set of options - * @option options profile [String] (AWS_PROFILE env var or 'default') - * the name of the profile to load. - * @option options filename [String] ('~/.aws/credentials' or defined by - * AWS_SHARED_CREDENTIALS_FILE process env var) - * the filename to use when loading credentials. - * @option options callback [Function] (err) Credentials are eagerly loaded - * by the constructor. When the callback is called with no error, the - * credentials have been loaded successfully. 
- */ - constructor: function ProcessCredentials(options) { - AWS.Credentials.call(this); - - options = options || {}; - - this.filename = options.filename; - this.profile = options.profile || process.env.AWS_PROFILE || AWS.util.defaultProfile; - this.get(options.callback || AWS.util.fn.noop); - }, +/***/ 26322: +/***/ ((__unused_webpack_module, exports) => { - /** - * @api private - */ - load: function load(callback) { - var self = this; - try { - var profiles = AWS.util.getProfilesFromSharedConfig(iniLoader, this.filename); - var profile = profiles[this.profile] || {}; - - if (Object.keys(profile).length === 0) { - throw AWS.util.error( - new Error('Profile ' + this.profile + ' not found'), - { code: 'ProcessCredentialsProviderFailure' } - ); - } +"use strict"; - if (profile['credential_process']) { - this.loadViaCredentialProcess(profile, function(err, data) { - if (err) { - callback(err, null); - } else { - self.expired = false; - self.accessKeyId = data.AccessKeyId; - self.secretAccessKey = data.SecretAccessKey; - self.sessionToken = data.SessionToken; - if (data.Expiration) { - self.expireTime = new Date(data.Expiration); - } - callback(null); - } - }); - } else { - throw AWS.util.error( - new Error('Profile ' + this.profile + ' did not include credential process'), - { code: 'ProcessCredentialsProviderFailure' } - ); - } - } catch (err) { - callback(err); +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.HttpResponse = void 0; +class HttpResponse { + constructor(options) { + this.statusCode = options.statusCode; + this.reason = options.reason; + this.headers = options.headers || {}; + this.body = options.body; + } + static isInstance(response) { + if (!response) + return false; + const resp = response; + return typeof resp.statusCode === "number" && typeof resp.headers === "object"; } - }, +} +exports.HttpResponse = HttpResponse; - /** - * Executes the credential_process and retrieves - * credentials from the output - * @api private - * @param profile [map] credentials profile - * @throws ProcessCredentialsProviderFailure - */ - loadViaCredentialProcess: function loadViaCredentialProcess(profile, callback) { - proc.exec(profile['credential_process'], { env: process.env }, function(err, stdOut, stdErr) { - if (err) { - callback(AWS.util.error( - new Error('credential_process returned error'), - { code: 'ProcessCredentialsProviderFailure'} - ), null); - } else { - try { - var credData = JSON.parse(stdOut); - if (credData.Expiration) { - var currentTime = AWS.util.date.getDate(); - var expireTime = new Date(credData.Expiration); - if (expireTime < currentTime) { - throw Error('credential_process returned expired credentials'); - } - } - if (credData.Version !== 1) { - throw Error('credential_process does not return Version == 1'); - } - callback(null, credData); - } catch (err) { - callback(AWS.util.error( - new Error(err.message), - { code: 'ProcessCredentialsProviderFailure'} - ), null); - } - } - }); - }, +/***/ }), - /** - * Loads the credentials from the credential process - * - * @callback callback function(err) - * Called after the credential process has been executed. When this - * callback is called with no error, it means that the credentials - * information has been loaded into the object (as the `accessKeyId`, - * `secretAccessKey`, and `sessionToken` properties). 
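For reference, a minimal sketch of the JSON document a credential_process program is expected to print, matching the fields loadViaCredentialProcess above reads: Version must be 1, AccessKeyId and SecretAccessKey are required, SessionToken and Expiration are optional. The key material and lifetime below are placeholders.

```javascript
#!/usr/bin/env node
// Hypothetical credential_process implementation: writes the Version 1
// payload that loadViaCredentialProcess parses from stdout. Values are
// placeholders; Expiration, when present, must be a date in the future.
process.stdout.write(JSON.stringify({
  Version: 1,
  AccessKeyId: "AKIDEXAMPLE",
  SecretAccessKey: "placeholder-secret-access-key",
  SessionToken: "placeholder-session-token",
  Expiration: new Date(Date.now() + 15 * 60 * 1000).toISOString(),
}));
```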
- * @param err [Error] if an error occurred, this value will be filled - * @see get - */ - refresh: function refresh(callback) { - iniLoader.clearCachedFiles(); - this.coalesceRefresh(callback || AWS.util.fn.callback); - } -}); +/***/ 64418: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(89179), exports); +tslib_1.__exportStar(__nccwpck_require__(99242), exports); +tslib_1.__exportStar(__nccwpck_require__(63206), exports); +tslib_1.__exportStar(__nccwpck_require__(38746), exports); +tslib_1.__exportStar(__nccwpck_require__(26322), exports); +tslib_1.__exportStar(__nccwpck_require__(61466), exports); +tslib_1.__exportStar(__nccwpck_require__(19135), exports); /***/ }), -/***/ 88764: -/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 61466: +/***/ ((__unused_webpack_module, exports) => { -var AWS = __nccwpck_require__(28437), - ENV_RELATIVE_URI = 'AWS_CONTAINER_CREDENTIALS_RELATIVE_URI', - ENV_FULL_URI = 'AWS_CONTAINER_CREDENTIALS_FULL_URI', - ENV_AUTH_TOKEN = 'AWS_CONTAINER_AUTHORIZATION_TOKEN', - FULL_URI_UNRESTRICTED_PROTOCOLS = ['https:'], - FULL_URI_ALLOWED_PROTOCOLS = ['http:', 'https:'], - FULL_URI_ALLOWED_HOSTNAMES = ['localhost', '127.0.0.1'], - RELATIVE_URI_HOST = '169.254.170.2'; +"use strict"; -/** - * Represents credentials received from specified URI. - * - * This class will request refreshable credentials from the relative URI - * specified by the AWS_CONTAINER_CREDENTIALS_RELATIVE_URI or the - * AWS_CONTAINER_CREDENTIALS_FULL_URI environment variable. If valid credentials - * are returned in the response, these will be used with zero configuration. - * - * This credentials class will by default timeout after 1 second of inactivity - * and retry 3 times. - * If your requests to the relative URI are timing out, you can increase - * the value by configuring them directly: - * - * ```javascript - * AWS.config.credentials = new AWS.RemoteCredentials({ - * httpOptions: { timeout: 5000 }, // 5 second timeout - * maxRetries: 10, // retry 10 times - * retryDelayOptions: { base: 200 } // see AWS.Config for information - * }); - * ``` - * - * @see AWS.Config.retryDelayOptions - * - * @!macro nobrowser - */ -AWS.RemoteCredentials = AWS.util.inherit(AWS.Credentials, { - constructor: function RemoteCredentials(options) { - AWS.Credentials.call(this); - options = options ? 
AWS.util.copy(options) : {}; - if (!options.httpOptions) options.httpOptions = {}; - options.httpOptions = AWS.util.merge( - this.httpOptions, options.httpOptions); - AWS.util.update(this, options); - }, +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isValidHostname = void 0; +function isValidHostname(hostname) { + const hostPattern = /^[a-z0-9][a-z0-9\.\-]*[a-z0-9]$/; + return hostPattern.test(hostname); +} +exports.isValidHostname = isValidHostname; - /** - * @api private - */ - httpOptions: { timeout: 1000 }, - - /** - * @api private - */ - maxRetries: 3, - - /** - * @api private - */ - isConfiguredForEcsCredentials: function isConfiguredForEcsCredentials() { - return Boolean( - process && - process.env && - (process.env[ENV_RELATIVE_URI] || process.env[ENV_FULL_URI]) - ); - }, - /** - * @api private - */ - getECSFullUri: function getECSFullUri() { - if (process && process.env) { - var relative = process.env[ENV_RELATIVE_URI], - full = process.env[ENV_FULL_URI]; - if (relative) { - return 'http://' + RELATIVE_URI_HOST + relative; - } else if (full) { - var parsed = AWS.util.urlParse(full); - if (FULL_URI_ALLOWED_PROTOCOLS.indexOf(parsed.protocol) < 0) { - throw AWS.util.error( - new Error('Unsupported protocol: AWS.RemoteCredentials supports ' - + FULL_URI_ALLOWED_PROTOCOLS.join(',') + ' only; ' - + parsed.protocol + ' requested.'), - { code: 'ECSCredentialsProviderFailure' } - ); - } - - if (FULL_URI_UNRESTRICTED_PROTOCOLS.indexOf(parsed.protocol) < 0 && - FULL_URI_ALLOWED_HOSTNAMES.indexOf(parsed.hostname) < 0) { - throw AWS.util.error( - new Error('Unsupported hostname: AWS.RemoteCredentials only supports ' - + FULL_URI_ALLOWED_HOSTNAMES.join(',') + ' for ' + parsed.protocol + '; ' - + parsed.protocol + '//' + parsed.hostname + ' requested.'), - { code: 'ECSCredentialsProviderFailure' } - ); - } - - return full; - } else { - throw AWS.util.error( - new Error('Variable ' + ENV_RELATIVE_URI + ' or ' + ENV_FULL_URI + - ' must be set to use AWS.RemoteCredentials.'), - { code: 'ECSCredentialsProviderFailure' } - ); - } - } else { - throw AWS.util.error( - new Error('No process info available'), - { code: 'ECSCredentialsProviderFailure' } - ); - } - }, +/***/ }), - /** - * @api private - */ - getECSAuthToken: function getECSAuthToken() { - if (process && process.env && process.env[ENV_FULL_URI]) { - return process.env[ENV_AUTH_TOKEN]; - } - }, +/***/ 19135: +/***/ ((__unused_webpack_module, exports) => { - /** - * @api private - */ - credsFormatIsValid: function credsFormatIsValid(credData) { - return (!!credData.accessKeyId && !!credData.secretAccessKey && - !!credData.sessionToken && !!credData.expireTime); - }, +"use strict"; - /** - * @api private - */ - formatCreds: function formatCreds(credData) { - if (!!credData.credentials) { - credData = credData.credentials; - } +Object.defineProperty(exports, "__esModule", ({ value: true })); - return { - expired: false, - accessKeyId: credData.accessKeyId || credData.AccessKeyId, - secretAccessKey: credData.secretAccessKey || credData.SecretAccessKey, - sessionToken: credData.sessionToken || credData.Token, - expireTime: new Date(credData.expiration || credData.Expiration) - }; - }, - /** - * @api private - */ - request: function request(url, callback) { - var httpRequest = new AWS.HttpRequest(url); - httpRequest.method = 'GET'; - httpRequest.headers.Accept = 'application/json'; - var token = this.getECSAuthToken(); - if (token) { - httpRequest.headers.Authorization = token; - } - 
AWS.util.handleRequestWithRetries(httpRequest, this, callback); - }, +/***/ }), - /** - * Loads the credentials from the relative URI specified by container - * - * @callback callback function(err) - * Called when the request to the relative URI responds (or fails). When - * this callback is called with no error, it means that the credentials - * information has been loaded into the object (as the `accessKeyId`, - * `secretAccessKey`, `sessionToken`, and `expireTime` properties). - * @param err [Error] if an error occurred, this value will be filled - * @see get - */ - refresh: function refresh(callback) { - this.coalesceRefresh(callback || AWS.util.fn.callback); - }, +/***/ 68031: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - /** - * @api private - */ - load: function load(callback) { - var self = this; - var fullUri; +"use strict"; - try { - fullUri = this.getECSFullUri(); - } catch (err) { - callback(err); - return; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.buildQueryString = void 0; +const util_uri_escape_1 = __nccwpck_require__(54197); +function buildQueryString(query) { + const parts = []; + for (let key of Object.keys(query).sort()) { + const value = query[key]; + key = (0, util_uri_escape_1.escapeUri)(key); + if (Array.isArray(value)) { + for (let i = 0, iLen = value.length; i < iLen; i++) { + parts.push(`${key}=${(0, util_uri_escape_1.escapeUri)(value[i])}`); + } + } + else { + let qsEntry = key; + if (value || typeof value === "string") { + qsEntry += `=${(0, util_uri_escape_1.escapeUri)(value)}`; + } + parts.push(qsEntry); + } } + return parts.join("&"); +} +exports.buildQueryString = buildQueryString; - this.request(fullUri, function(err, data) { - if (!err) { - try { - data = JSON.parse(data); - var creds = self.formatCreds(data); - if (!self.credsFormatIsValid(creds)) { - throw AWS.util.error( - new Error('Response data is not in valid format'), - { code: 'ECSCredentialsProviderFailure' } - ); - } - AWS.util.update(self, creds); - } catch (dataError) { - err = dataError; + +/***/ }), + +/***/ 4769: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.parseQueryString = void 0; +function parseQueryString(querystring) { + const query = {}; + querystring = querystring.replace(/^\?/, ""); + if (querystring) { + for (const pair of querystring.split("&")) { + let [key, value = null] = pair.split("="); + key = decodeURIComponent(key); + if (value) { + value = decodeURIComponent(value); + } + if (!(key in query)) { + query[key] = value; + } + else if (Array.isArray(query[key])) { + query[key].push(value); + } + else { + query[key] = [query[key], value]; + } } - } - callback(err, creds); - }); - } -}); + } + return query; +} +exports.parseQueryString = parseQueryString; /***/ }), -/***/ 15037: -/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 68415: +/***/ ((__unused_webpack_module, exports) => { -var AWS = __nccwpck_require__(28437); -var STS = __nccwpck_require__(57513); +"use strict"; -/** - * Represents credentials retrieved from STS SAML support. - * - * By default this provider gets credentials using the - * {AWS.STS.assumeRoleWithSAML} service operation. This operation - * requires a `RoleArn` containing the ARN of the IAM trust policy for the - * application for which credentials will be given, as well as a `PrincipalArn` - * representing the ARN for the SAML identity provider. 
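Alongside these hunks the patch bundles the v3 query-string helpers, buildQueryString and parseQueryString. A small round-trip sketch, assuming they are consumed through their published packages (commonly @smithy/querystring-builder and @smithy/querystring-parser; the names are assumptions here):

```javascript
// Sketch only: buildQueryString sorts and URI-escapes keys, and
// parseQueryString collects repeated keys into arrays. Package names are
// assumptions; inside this bundle the helpers live behind webpack ids.
const { buildQueryString } = require("@smithy/querystring-builder");
const { parseQueryString } = require("@smithy/querystring-parser");

const qs = buildQueryString({ b: "2", a: ["1", "3"], flag: "" });
console.log(qs);                         // "a=1&a=3&b=2&flag="

console.log(parseQueryString("?" + qs)); // { a: ["1", "3"], b: "2", flag: "" }
```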
In addition, the - * `SAMLAssertion` must be set to the token provided by the identity - * provider. See {constructor} for an example on creating a credentials - * object with proper `RoleArn`, `PrincipalArn`, and `SAMLAssertion` values. - * - * ## Refreshing Credentials from Identity Service - * - * In addition to AWS credentials expiring after a given amount of time, the - * login token from the identity provider will also expire. Once this token - * expires, it will not be usable to refresh AWS credentials, and another - * token will be needed. The SDK does not manage refreshing of the token value, - * but this can be done through a "refresh token" supported by most identity - * providers. Consult the documentation for the identity provider for refreshing - * tokens. Once the refreshed token is acquired, you should make sure to update - * this new token in the credentials object's {params} property. The following - * code will update the SAMLAssertion, assuming you have retrieved an updated - * token from the identity provider: - * - * ```javascript - * AWS.config.credentials.params.SAMLAssertion = updatedToken; - * ``` - * - * Future calls to `credentials.refresh()` will now use the new token. - * - * @!attribute params - * @return [map] the map of params passed to - * {AWS.STS.assumeRoleWithSAML}. To update the token, set the - * `params.SAMLAssertion` property. - */ -AWS.SAMLCredentials = AWS.util.inherit(AWS.Credentials, { - /** - * Creates a new credentials object. - * @param (see AWS.STS.assumeRoleWithSAML) - * @example Creating a new credentials object - * AWS.config.credentials = new AWS.SAMLCredentials({ - * RoleArn: 'arn:aws:iam::1234567890:role/SAMLRole', - * PrincipalArn: 'arn:aws:iam::1234567890:role/SAMLPrincipal', - * SAMLAssertion: 'base64-token', // base64-encoded token from IdP - * }); - * @see AWS.STS.assumeRoleWithSAML - */ - constructor: function SAMLCredentials(params) { - AWS.Credentials.call(this); - this.expired = true; - this.params = params; - }, +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.NODEJS_TIMEOUT_ERROR_CODES = exports.TRANSIENT_ERROR_STATUS_CODES = exports.TRANSIENT_ERROR_CODES = exports.THROTTLING_ERROR_CODES = exports.CLOCK_SKEW_ERROR_CODES = void 0; +exports.CLOCK_SKEW_ERROR_CODES = [ + "AuthFailure", + "InvalidSignatureException", + "RequestExpired", + "RequestInTheFuture", + "RequestTimeTooSkewed", + "SignatureDoesNotMatch", +]; +exports.THROTTLING_ERROR_CODES = [ + "BandwidthLimitExceeded", + "EC2ThrottledException", + "LimitExceededException", + "PriorRequestNotComplete", + "ProvisionedThroughputExceededException", + "RequestLimitExceeded", + "RequestThrottled", + "RequestThrottledException", + "SlowDown", + "ThrottledException", + "Throttling", + "ThrottlingException", + "TooManyRequestsException", + "TransactionInProgressException", +]; +exports.TRANSIENT_ERROR_CODES = ["TimeoutError", "RequestTimeout", "RequestTimeoutException"]; +exports.TRANSIENT_ERROR_STATUS_CODES = [500, 502, 503, 504]; +exports.NODEJS_TIMEOUT_ERROR_CODES = ["ECONNRESET", "ECONNREFUSED", "EPIPE", "ETIMEDOUT"]; - /** - * Refreshes credentials using {AWS.STS.assumeRoleWithSAML} - * - * @callback callback function(err) - * Called when the STS service responds (or fails). When - * this callback is called with no error, it means that the credentials - * information has been loaded into the object (as the `accessKeyId`, - * `secretAccessKey`, and `sessionToken` properties). 
- * @param err [Error] if an error occurred, this value will be filled - * @see get - */ - refresh: function refresh(callback) { - this.coalesceRefresh(callback || AWS.util.fn.callback); - }, - /** - * @api private - */ - load: function load(callback) { - var self = this; - self.createClients(); - self.service.assumeRoleWithSAML(function (err, data) { - if (!err) { - self.service.credentialsFrom(data, self); - } - callback(err); - }); - }, +/***/ }), - /** - * @api private - */ - createClients: function() { - this.service = this.service || new STS({params: this.params}); - } +/***/ 6375: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -}); +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isServerError = exports.isTransientError = exports.isThrottlingError = exports.isClockSkewError = exports.isRetryableByTrait = void 0; +const constants_1 = __nccwpck_require__(68415); +const isRetryableByTrait = (error) => error.$retryable !== undefined; +exports.isRetryableByTrait = isRetryableByTrait; +const isClockSkewError = (error) => constants_1.CLOCK_SKEW_ERROR_CODES.includes(error.name); +exports.isClockSkewError = isClockSkewError; +const isThrottlingError = (error) => { + var _a, _b; + return ((_a = error.$metadata) === null || _a === void 0 ? void 0 : _a.httpStatusCode) === 429 || + constants_1.THROTTLING_ERROR_CODES.includes(error.name) || + ((_b = error.$retryable) === null || _b === void 0 ? void 0 : _b.throttling) == true; +}; +exports.isThrottlingError = isThrottlingError; +const isTransientError = (error) => { + var _a; + return constants_1.TRANSIENT_ERROR_CODES.includes(error.name) || + constants_1.NODEJS_TIMEOUT_ERROR_CODES.includes((error === null || error === void 0 ? void 0 : error.code) || "") || + constants_1.TRANSIENT_ERROR_STATUS_CODES.includes(((_a = error.$metadata) === null || _a === void 0 ? void 0 : _a.httpStatusCode) || 0); +}; +exports.isTransientError = isTransientError; +const isServerError = (error) => { + var _a; + if (((_a = error.$metadata) === null || _a === void 0 ? void 0 : _a.httpStatusCode) !== undefined) { + const statusCode = error.$metadata.httpStatusCode; + if (500 <= statusCode && statusCode <= 599 && !(0, exports.isTransientError)(error)) { + return true; + } + return false; + } + return false; +}; +exports.isServerError = isServerError; /***/ }), -/***/ 13754: -/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 47237: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -var AWS = __nccwpck_require__(28437); -var STS = __nccwpck_require__(57513); -var iniLoader = AWS.util.iniLoader; +"use strict"; -var ASSUME_ROLE_DEFAULT_REGION = 'us-east-1'; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getConfigFilepath = exports.ENV_CONFIG_PATH = void 0; +const path_1 = __nccwpck_require__(71017); +const getHomeDir_1 = __nccwpck_require__(68340); +exports.ENV_CONFIG_PATH = "AWS_CONFIG_FILE"; +const getConfigFilepath = () => process.env[exports.ENV_CONFIG_PATH] || (0, path_1.join)((0, getHomeDir_1.getHomeDir)(), ".aws", "config"); +exports.getConfigFilepath = getConfigFilepath; -/** - * Represents credentials loaded from shared credentials file - * (defaulting to ~/.aws/credentials or defined by the - * `AWS_SHARED_CREDENTIALS_FILE` environment variable). - * - * ## Using the shared credentials file - * - * This provider is checked by default in the Node.js environment. 
To use the - * credentials file provider, simply add your access and secret keys to the - * ~/.aws/credentials file in the following format: - * - * [default] - * aws_access_key_id = AKID... - * aws_secret_access_key = YOUR_SECRET_KEY - * - * ## Using custom profiles - * - * The SDK supports loading credentials for separate profiles. This can be done - * in two ways: - * - * 1. Set the `AWS_PROFILE` environment variable in your process prior to - * loading the SDK. - * 2. Directly load the AWS.SharedIniFileCredentials provider: - * - * ```javascript - * var creds = new AWS.SharedIniFileCredentials({profile: 'myprofile'}); - * AWS.config.credentials = creds; - * ``` - * - * @!macro nobrowser - */ -AWS.SharedIniFileCredentials = AWS.util.inherit(AWS.Credentials, { - /** - * Creates a new SharedIniFileCredentials object. - * - * @param options [map] a set of options - * @option options profile [String] (AWS_PROFILE env var or 'default') - * the name of the profile to load. - * @option options filename [String] ('~/.aws/credentials' or defined by - * AWS_SHARED_CREDENTIALS_FILE process env var) - * the filename to use when loading credentials. - * @option options disableAssumeRole [Boolean] (false) True to disable - * support for profiles that assume an IAM role. If true, and an assume - * role profile is selected, an error is raised. - * @option options preferStaticCredentials [Boolean] (false) True to - * prefer static credentials to role_arn if both are present. - * @option options tokenCodeFn [Function] (null) Function to provide - * STS Assume Role TokenCode, if mfa_serial is provided for profile in ini - * file. Function is called with value of mfa_serial and callback, and - * should provide the TokenCode or an error to the callback in the format - * callback(err, token) - * @option options callback [Function] (err) Credentials are eagerly loaded - * by the constructor. When the callback is called with no error, the - * credentials have been loaded successfully. - * @option options httpOptions [map] A set of options to pass to the low-level - * HTTP request. Currently supported options are: - * * **proxy** [String] — the URL to proxy requests through - * * **agent** [http.Agent, https.Agent] — the Agent object to perform - * HTTP requests with. Used for connection pooling. Defaults to the global - * agent (`http.globalAgent`) for non-SSL connections. Note that for - * SSL connections, a special Agent object is used in order to enable - * peer certificate verification. This feature is only available in the - * Node.js environment. - * * **connectTimeout** [Integer] — Sets the socket to timeout after - * failing to establish a connection with the server after - * `connectTimeout` milliseconds. This timeout has no effect once a socket - * connection has been established. - * * **timeout** [Integer] — The number of milliseconds a request can - * take before automatically being terminated. - * Defaults to two minutes (120000). 
- */ - constructor: function SharedIniFileCredentials(options) { - AWS.Credentials.call(this); - - options = options || {}; - - this.filename = options.filename; - this.profile = options.profile || process.env.AWS_PROFILE || AWS.util.defaultProfile; - this.disableAssumeRole = Boolean(options.disableAssumeRole); - this.preferStaticCredentials = Boolean(options.preferStaticCredentials); - this.tokenCodeFn = options.tokenCodeFn || null; - this.httpOptions = options.httpOptions || null; - this.get(options.callback || AWS.util.fn.noop); - }, - /** - * @api private - */ - load: function load(callback) { - var self = this; - try { - var profiles = AWS.util.getProfilesFromSharedConfig(iniLoader, this.filename); - var profile = profiles[this.profile] || {}; - - if (Object.keys(profile).length === 0) { - throw AWS.util.error( - new Error('Profile ' + this.profile + ' not found'), - { code: 'SharedIniFileCredentialsProviderFailure' } - ); - } +/***/ }), - /* - In the CLI, the presence of both a role_arn and static credentials have - different meanings depending on how many profiles have been visited. For - the first profile processed, role_arn takes precedence over any static - credentials, but for all subsequent profiles, static credentials are - used if present, and only in their absence will the profile's - source_profile and role_arn keys be used to load another set of - credentials. This var is intended to yield compatible behaviour in this - sdk. - */ - var preferStaticCredentialsToRoleArn = Boolean( - this.preferStaticCredentials - && profile['aws_access_key_id'] - && profile['aws_secret_access_key'] - ); - - if (profile['role_arn'] && !preferStaticCredentialsToRoleArn) { - this.loadRoleProfile(profiles, profile, function(err, data) { - if (err) { - callback(err); - } else { - self.expired = false; - self.accessKeyId = data.Credentials.AccessKeyId; - self.secretAccessKey = data.Credentials.SecretAccessKey; - self.sessionToken = data.Credentials.SessionToken; - self.expireTime = data.Credentials.Expiration; - callback(null); - } - }); - return; - } +/***/ 99036: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - this.accessKeyId = profile['aws_access_key_id']; - this.secretAccessKey = profile['aws_secret_access_key']; - this.sessionToken = profile['aws_session_token']; +"use strict"; - if (!this.accessKeyId || !this.secretAccessKey) { - throw AWS.util.error( - new Error('Credentials not set for profile ' + this.profile), - { code: 'SharedIniFileCredentialsProviderFailure' } - ); - } - this.expired = false; - callback(null); - } catch (err) { - callback(err); - } - }, +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getCredentialsFilepath = exports.ENV_CREDENTIALS_PATH = void 0; +const path_1 = __nccwpck_require__(71017); +const getHomeDir_1 = __nccwpck_require__(68340); +exports.ENV_CREDENTIALS_PATH = "AWS_SHARED_CREDENTIALS_FILE"; +const getCredentialsFilepath = () => process.env[exports.ENV_CREDENTIALS_PATH] || (0, path_1.join)((0, getHomeDir_1.getHomeDir)(), ".aws", "credentials"); +exports.getCredentialsFilepath = getCredentialsFilepath; - /** - * Loads the credentials from the shared credentials file - * - * @callback callback function(err) - * Called after the shared INI file on disk is read and parsed. When this - * callback is called with no error, it means that the credentials - * information has been loaded into the object (as the `accessKeyId`, - * `secretAccessKey`, and `sessionToken` properties). 
- * @param err [Error] if an error occurred, this value will be filled - * @see get - */ - refresh: function refresh(callback) { - iniLoader.clearCachedFiles(); - this.coalesceRefresh( - callback || AWS.util.fn.callback, - this.disableAssumeRole - ); - }, - /** - * @api private - */ - loadRoleProfile: function loadRoleProfile(creds, roleProfile, callback) { - if (this.disableAssumeRole) { - throw AWS.util.error( - new Error('Role assumption profiles are disabled. ' + - 'Failed to load profile ' + this.profile + - ' from ' + creds.filename), - { code: 'SharedIniFileCredentialsProviderFailure' } - ); - } - - var self = this; - var roleArn = roleProfile['role_arn']; - var roleSessionName = roleProfile['role_session_name']; - var externalId = roleProfile['external_id']; - var mfaSerial = roleProfile['mfa_serial']; - var sourceProfileName = roleProfile['source_profile']; - - // From experimentation, the following behavior mimics the AWS CLI: - // - // 1. Use region from the profile if present. - // 2. Otherwise fall back to N. Virginia (global endpoint). - // - // It is necessary to do the fallback explicitly, because if - // 'AWS_STS_REGIONAL_ENDPOINTS=regional', the underlying STS client will - // otherwise throw an error if region is left 'undefined'. - // - // Experimentation shows that the AWS CLI (tested at version 1.18.136) - // ignores the following potential sources of a region for the purposes of - // this AssumeRole call: - // - // - The [default] profile - // - The AWS_REGION environment variable - // - // Ignoring the [default] profile for the purposes of AssumeRole is arguably - // a bug in the CLI since it does use the [default] region for service - // calls... but right now we're matching behavior of the other tool. - var profileRegion = roleProfile['region'] || ASSUME_ROLE_DEFAULT_REGION; - - if (!sourceProfileName) { - throw AWS.util.error( - new Error('source_profile is not set using profile ' + this.profile), - { code: 'SharedIniFileCredentialsProviderFailure' } - ); - } - - var sourceProfileExistanceTest = creds[sourceProfileName]; - - if (typeof sourceProfileExistanceTest !== 'object') { - throw AWS.util.error( - new Error('source_profile ' + sourceProfileName + ' using profile ' - + this.profile + ' does not exist'), - { code: 'SharedIniFileCredentialsProviderFailure' } - ); - } - - var sourceCredentials = new AWS.SharedIniFileCredentials( - AWS.util.merge(this.options || {}, { - profile: sourceProfileName, - preferStaticCredentials: true - }) - ); - - this.roleArn = roleArn; - var sts = new STS({ - credentials: sourceCredentials, - region: profileRegion, - httpOptions: this.httpOptions - }); +/***/ }), - var roleParams = { - RoleArn: roleArn, - RoleSessionName: roleSessionName || 'aws-sdk-js-' + Date.now() - }; +/***/ 68340: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - if (externalId) { - roleParams.ExternalId = externalId; - } +"use strict"; - if (mfaSerial && self.tokenCodeFn) { - roleParams.SerialNumber = mfaSerial; - self.tokenCodeFn(mfaSerial, function(err, token) { - if (err) { - var message; - if (err instanceof Error) { - message = err.message; - } else { - message = err; - } - callback( - AWS.util.error( - new Error('Error fetching MFA token: ' + message), - { code: 'SharedIniFileCredentialsProviderFailure' } - )); - return; - } +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getHomeDir = void 0; +const os_1 = __nccwpck_require__(22037); +const path_1 = __nccwpck_require__(71017); +const getHomeDir = () => { + 
const { HOME, USERPROFILE, HOMEPATH, HOMEDRIVE = `C:${path_1.sep}` } = process.env; + if (HOME) + return HOME; + if (USERPROFILE) + return USERPROFILE; + if (HOMEPATH) + return `${HOMEDRIVE}${HOMEPATH}`; + return (0, os_1.homedir)(); +}; +exports.getHomeDir = getHomeDir; - roleParams.TokenCode = token; - sts.assumeRole(roleParams, callback); - }); - return; - } - sts.assumeRole(roleParams, callback); - } + +/***/ }), + +/***/ 32041: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getProfileData = void 0; +const profileKeyRegex = /^profile\s(["'])?([^\1]+)\1$/; +const getProfileData = (data) => Object.entries(data) + .filter(([key]) => profileKeyRegex.test(key)) + .reduce((acc, [key, value]) => ({ ...acc, [profileKeyRegex.exec(key)[2]]: value }), { + ...(data.default && { default: data.default }), }); +exports.getProfileData = getProfileData; /***/ }), -/***/ 68335: -/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { - -var AWS = __nccwpck_require__(28437); -var path = __nccwpck_require__(71017); -var crypto = __nccwpck_require__(6113); -var iniLoader = AWS.util.iniLoader; +/***/ 52802: +/***/ ((__unused_webpack_module, exports) => { -/** - * Represents credentials from sso.getRoleCredentials API for - * `sso_*` values defined in shared credentials file. - * - * ## Using SSO credentials - * - * The credentials file must specify the information below to use sso: - * - * [profile sso-profile] - * sso_account_id = 012345678901 - * sso_region = **-****-* - * sso_role_name = SampleRole - * sso_start_url = https://d-******.awsapps.com/start - * - * or using the session format: - * - * [profile sso-token] - * sso_session = prod - * sso_account_id = 012345678901 - * sso_role_name = SampleRole - * - * [sso-session prod] - * sso_region = **-****-* - * sso_start_url = https://d-******.awsapps.com/start - * - * This information will be automatically added to your shared credentials file by running - * `aws configure sso`. - * - * ## Using custom profiles - * - * The SDK supports loading credentials for separate profiles. This can be done - * in two ways: - * - * 1. Set the `AWS_PROFILE` environment variable in your process prior to - * loading the SDK. - * 2. Directly load the AWS.SsoCredentials provider: - * - * ```javascript - * var creds = new AWS.SsoCredentials({profile: 'myprofile'}); - * AWS.config.credentials = creds; - * ``` - * - * @!macro nobrowser - */ -AWS.SsoCredentials = AWS.util.inherit(AWS.Credentials, { - /** - * Creates a new SsoCredentials object. - * - * @param options [map] a set of options - * @option options profile [String] (AWS_PROFILE env var or 'default') - * the name of the profile to load. - * @option options filename [String] ('~/.aws/credentials' or defined by - * AWS_SHARED_CREDENTIALS_FILE process env var) - * the filename to use when loading credentials. - * @option options callback [Function] (err) Credentials are eagerly loaded - * by the constructor. When the callback is called with no error, the - * credentials have been loaded successfully. 
- */ - constructor: function SsoCredentials(options) { - AWS.Credentials.call(this); - - options = options || {}; - this.errorCode = 'SsoCredentialsProviderFailure'; - this.expired = true; - - this.filename = options.filename; - this.profile = options.profile || process.env.AWS_PROFILE || AWS.util.defaultProfile; - this.service = options.ssoClient; - this.httpOptions = options.httpOptions || null; - this.get(options.callback || AWS.util.fn.noop); - }, +"use strict"; - /** - * @api private - */ - load: function load(callback) { - var self = this; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getProfileName = exports.DEFAULT_PROFILE = exports.ENV_PROFILE = void 0; +exports.ENV_PROFILE = "AWS_PROFILE"; +exports.DEFAULT_PROFILE = "default"; +const getProfileName = (init) => init.profile || process.env[exports.ENV_PROFILE] || exports.DEFAULT_PROFILE; +exports.getProfileName = getProfileName; - try { - var profiles = AWS.util.getProfilesFromSharedConfig(iniLoader, this.filename); - var profile = profiles[this.profile] || {}; - - if (Object.keys(profile).length === 0) { - throw AWS.util.error( - new Error('Profile ' + this.profile + ' not found'), - { code: self.errorCode } - ); - } - if (profile.sso_session) { - if (!profile.sso_account_id || !profile.sso_role_name) { - throw AWS.util.error( - new Error('Profile ' + this.profile + ' with session ' + profile.sso_session + - ' does not have valid SSO credentials. Required parameters "sso_account_id", "sso_session", ' + - '"sso_role_name". Reference: https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-sso.html'), - { code: self.errorCode } - ); - } - } else { - if (!profile.sso_start_url || !profile.sso_account_id || !profile.sso_region || !profile.sso_role_name) { - throw AWS.util.error( - new Error('Profile ' + this.profile + ' does not have valid SSO credentials. Required parameters "sso_account_id", "sso_region", ' + - '"sso_role_name", "sso_start_url". Reference: https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-sso.html'), - { code: self.errorCode } - ); - } - } +/***/ }), - this.getToken(this.profile, profile, function (err, token) { - if (err) { - return callback(err); - } - var request = { - accessToken: token, - accountId: profile.sso_account_id, - roleName: profile.sso_role_name, - }; +/***/ 24740: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - if (!self.service || self.service.config.region !== profile.sso_region) { - self.service = new AWS.SSO({ - region: profile.sso_region, - httpOptions: self.httpOptions, - }); - } - - self.service.getRoleCredentials(request, function(err, data) { - if (err || !data || !data.roleCredentials) { - callback(AWS.util.error( - err || new Error('Please log in using "aws sso login"'), - { code: self.errorCode } - ), null); - } else if (!data.roleCredentials.accessKeyId || !data.roleCredentials.secretAccessKey || !data.roleCredentials.sessionToken || !data.roleCredentials.expiration) { - throw AWS.util.error(new Error( - 'SSO returns an invalid temporary credential.' 
- )); - } else { - self.expired = false; - self.accessKeyId = data.roleCredentials.accessKeyId; - self.secretAccessKey = data.roleCredentials.secretAccessKey; - self.sessionToken = data.roleCredentials.sessionToken; - self.expireTime = new Date(data.roleCredentials.expiration); - callback(null); - } - }); - }); - } catch (err) { - callback(err); - } - }, +"use strict"; - /** - * @private - * Uses legacy file system retrieval or if sso-session is set, - * use the SSOTokenProvider. - * - * @param {string} profileName - name of the profile. - * @param {object} profile - profile data containing sso_session or sso_start_url etc. - * @param {function} callback - called with (err, (string) token). - * - * @returns {void} - */ - getToken: function getToken(profileName, profile, callback) { - var self = this; - - if (profile.sso_session) { - var _iniLoader = AWS.util.iniLoader; - var ssoSessions = _iniLoader.loadSsoSessionsFrom(); - var ssoSession = ssoSessions[profile.sso_session]; - Object.assign(profile, ssoSession); - - var ssoTokenProvider = new AWS.SSOTokenProvider({ - profile: profileName, - }); - ssoTokenProvider.load(function (err) { - if (err) { - return callback(err); - } - return callback(null, ssoTokenProvider.token); - }); - return; - } +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getSSOTokenFilepath = void 0; +const crypto_1 = __nccwpck_require__(6113); +const path_1 = __nccwpck_require__(71017); +const getHomeDir_1 = __nccwpck_require__(68340); +const getSSOTokenFilepath = (id) => { + const hasher = (0, crypto_1.createHash)("sha1"); + const cacheName = hasher.update(id).digest("hex"); + return (0, path_1.join)((0, getHomeDir_1.getHomeDir)(), ".aws", "sso", "cache", `${cacheName}.json`); +}; +exports.getSSOTokenFilepath = getSSOTokenFilepath; - try { - /** - * The time window (15 mins) that SDK will treat the SSO token expires in before the defined expiration date in token. - * This is needed because server side may have invalidated the token before the defined expiration date. - */ - var EXPIRE_WINDOW_MS = 15 * 60 * 1000; - var hasher = crypto.createHash('sha1'); - var fileName = hasher.update(profile.sso_start_url).digest('hex') + '.json'; - var cachePath = path.join( - iniLoader.getHomeDir(), - '.aws', - 'sso', - 'cache', - fileName - ); - var cacheFile = AWS.util.readFileSync(cachePath); - var cacheContent = null; - if (cacheFile) { - cacheContent = JSON.parse(cacheFile); - } - if (!cacheContent) { - throw AWS.util.error( - new Error('Cached credentials not found under ' + this.profile + ' profile. Please make sure you log in with aws sso login first'), - { code: self.errorCode } - ); - } - if (!cacheContent.startUrl || !cacheContent.region || !cacheContent.accessToken || !cacheContent.expiresAt) { - throw AWS.util.error( - new Error('Cached credentials are missing required properties. Try running aws sso login.') - ); - } +/***/ }), - if (new Date(cacheContent.expiresAt).getTime() - Date.now() <= EXPIRE_WINDOW_MS) { - throw AWS.util.error(new Error( - 'The SSO session associated with this profile has expired. To refresh this SSO session run aws sso login with the corresponding profile.' - )); - } +/***/ 69678: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - return callback(null, cacheContent.accessToken); - } catch (err) { - return callback(err, null); - } - }, +"use strict"; - /** - * Loads the credentials from the AWS SSO process - * - * @callback callback function(err) - * Called after the AWS SSO process has been executed. 
When this - * callback is called with no error, it means that the credentials - * information has been loaded into the object (as the `accessKeyId`, - * `secretAccessKey`, and `sessionToken` properties). - * @param err [Error] if an error occurred, this value will be filled - * @see get - */ - refresh: function refresh(callback) { - iniLoader.clearCachedFiles(); - this.coalesceRefresh(callback || AWS.util.fn.callback); - }, -}); +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getSSOTokenFromFile = void 0; +const fs_1 = __nccwpck_require__(57147); +const getSSOTokenFilepath_1 = __nccwpck_require__(24740); +const { readFile } = fs_1.promises; +const getSSOTokenFromFile = async (id) => { + const ssoTokenFilepath = (0, getSSOTokenFilepath_1.getSSOTokenFilepath)(id); + const ssoTokenText = await readFile(ssoTokenFilepath, "utf8"); + return JSON.parse(ssoTokenText); +}; +exports.getSSOTokenFromFile = getSSOTokenFromFile; /***/ }), -/***/ 77360: -/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 82820: +/***/ ((__unused_webpack_module, exports) => { -var AWS = __nccwpck_require__(28437); -var STS = __nccwpck_require__(57513); +"use strict"; -/** - * Represents temporary credentials retrieved from {AWS.STS}. Without any - * extra parameters, credentials will be fetched from the - * {AWS.STS.getSessionToken} operation. If an IAM role is provided, the - * {AWS.STS.assumeRole} operation will be used to fetch credentials for the - * role instead. - * - * @note AWS.TemporaryCredentials is deprecated, but remains available for - * backwards compatibility. {AWS.ChainableTemporaryCredentials} is the - * preferred class for temporary credentials. - * - * To setup temporary credentials, configure a set of master credentials - * using the standard credentials providers (environment, EC2 instance metadata, - * or from the filesystem), then set the global credentials to a new - * temporary credentials object: - * - * ```javascript - * // Note that environment credentials are loaded by default, - * // the following line is shown for clarity: - * AWS.config.credentials = new AWS.EnvironmentCredentials('AWS'); - * - * // Now set temporary credentials seeded from the master credentials - * AWS.config.credentials = new AWS.TemporaryCredentials(); - * - * // subsequent requests will now use temporary credentials from AWS STS. - * new AWS.S3().listBucket(function(err, data) { ... }); - * ``` - * - * @!attribute masterCredentials - * @return [AWS.Credentials] the master (non-temporary) credentials used to - * get and refresh temporary credentials from AWS STS. - * @note (see constructor) - */ -AWS.TemporaryCredentials = AWS.util.inherit(AWS.Credentials, { - /** - * Creates a new temporary credentials object. - * - * @note In order to create temporary credentials, you first need to have - * "master" credentials configured in {AWS.Config.credentials}. These - * master credentials are necessary to retrieve the temporary credentials, - * as well as refresh the credentials when they expire. - * @param params [map] a map of options that are passed to the - * {AWS.STS.assumeRole} or {AWS.STS.getSessionToken} operations. - * If a `RoleArn` parameter is passed in, credentials will be based on the - * IAM role. - * @param masterCredentials [AWS.Credentials] the master (non-temporary) credentials - * used to get and refresh temporary credentials from AWS STS. 
- * @example Creating a new credentials object for generic temporary credentials - * AWS.config.credentials = new AWS.TemporaryCredentials(); - * @example Creating a new credentials object for an IAM role - * AWS.config.credentials = new AWS.TemporaryCredentials({ - * RoleArn: 'arn:aws:iam::1234567890:role/TemporaryCredentials', - * }); - * @see AWS.STS.assumeRole - * @see AWS.STS.getSessionToken - */ - constructor: function TemporaryCredentials(params, masterCredentials) { - AWS.Credentials.call(this); - this.loadMasterCredentials(masterCredentials); - this.expired = true; - - this.params = params || {}; - if (this.params.RoleArn) { - this.params.RoleSessionName = - this.params.RoleSessionName || 'temporary-credentials'; - } - }, +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getSsoSessionData = void 0; +const ssoSessionKeyRegex = /^sso-session\s(["'])?([^\1]+)\1$/; +const getSsoSessionData = (data) => Object.entries(data) + .filter(([key]) => ssoSessionKeyRegex.test(key)) + .reduce((acc, [key, value]) => ({ ...acc, [ssoSessionKeyRegex.exec(key)[2]]: value }), {}); +exports.getSsoSessionData = getSsoSessionData; - /** - * Refreshes credentials using {AWS.STS.assumeRole} or - * {AWS.STS.getSessionToken}, depending on whether an IAM role ARN was passed - * to the credentials {constructor}. - * - * @callback callback function(err) - * Called when the STS service responds (or fails). When - * this callback is called with no error, it means that the credentials - * information has been loaded into the object (as the `accessKeyId`, - * `secretAccessKey`, and `sessionToken` properties). - * @param err [Error] if an error occurred, this value will be filled - * @see get - */ - refresh: function refresh (callback) { - this.coalesceRefresh(callback || AWS.util.fn.callback); - }, - /** - * @api private - */ - load: function load (callback) { - var self = this; - self.createClients(); - self.masterCredentials.get(function () { - self.service.config.credentials = self.masterCredentials; - var operation = self.params.RoleArn ? 
- self.service.assumeRole : self.service.getSessionToken; - operation.call(self.service, function (err, data) { - if (!err) { - self.service.credentialsFrom(data, self); - } - callback(err); - }); - }); - }, +/***/ }), - /** - * @api private - */ - loadMasterCredentials: function loadMasterCredentials (masterCredentials) { - this.masterCredentials = masterCredentials || AWS.config.credentials; - while (this.masterCredentials.masterCredentials) { - this.masterCredentials = this.masterCredentials.masterCredentials; - } +/***/ 43507: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - if (typeof this.masterCredentials.get !== 'function') { - this.masterCredentials = new AWS.Credentials(this.masterCredentials); - } - }, +"use strict"; - /** - * @api private - */ - createClients: function () { - this.service = this.service || new STS({params: this.params}); - } +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(68340), exports); +tslib_1.__exportStar(__nccwpck_require__(52802), exports); +tslib_1.__exportStar(__nccwpck_require__(24740), exports); +tslib_1.__exportStar(__nccwpck_require__(69678), exports); +tslib_1.__exportStar(__nccwpck_require__(41879), exports); +tslib_1.__exportStar(__nccwpck_require__(34649), exports); +tslib_1.__exportStar(__nccwpck_require__(2546), exports); +tslib_1.__exportStar(__nccwpck_require__(63191), exports); -}); + +/***/ }), + +/***/ 41879: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.loadSharedConfigFiles = void 0; +const getConfigFilepath_1 = __nccwpck_require__(47237); +const getCredentialsFilepath_1 = __nccwpck_require__(99036); +const getProfileData_1 = __nccwpck_require__(32041); +const parseIni_1 = __nccwpck_require__(54262); +const slurpFile_1 = __nccwpck_require__(19155); +const swallowError = () => ({}); +const loadSharedConfigFiles = async (init = {}) => { + const { filepath = (0, getCredentialsFilepath_1.getCredentialsFilepath)(), configFilepath = (0, getConfigFilepath_1.getConfigFilepath)() } = init; + const parsedFiles = await Promise.all([ + (0, slurpFile_1.slurpFile)(configFilepath, { + ignoreCache: init.ignoreCache, + }) + .then(parseIni_1.parseIni) + .then(getProfileData_1.getProfileData) + .catch(swallowError), + (0, slurpFile_1.slurpFile)(filepath, { + ignoreCache: init.ignoreCache, + }) + .then(parseIni_1.parseIni) + .catch(swallowError), + ]); + return { + configFile: parsedFiles[0], + credentialsFile: parsedFiles[1], + }; +}; +exports.loadSharedConfigFiles = loadSharedConfigFiles; /***/ }), -/***/ 11017: -/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 34649: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -var AWS = __nccwpck_require__(28437); -var fs = __nccwpck_require__(57147); -var STS = __nccwpck_require__(57513); -var iniLoader = AWS.util.iniLoader; +"use strict"; -/** - * Represents OIDC credentials from a file on disk - * If the credentials expire, the SDK can {refresh} the credentials - * from the file. - * - * ## Using the web identity token file - * - * This provider is checked by default in the Node.js environment. 
To use - * the provider simply add your OIDC token to a file (ASCII encoding) and - * share the filename in either AWS_WEB_IDENTITY_TOKEN_FILE environment - * variable or web_identity_token_file shared config variable - * - * The file contains encoded OIDC token and the characters are - * ASCII encoded. OIDC tokens are JSON Web Tokens (JWT). - * JWT's are 3 base64 encoded strings joined by the '.' character. - * - * This class will read filename from AWS_WEB_IDENTITY_TOKEN_FILE - * environment variable or web_identity_token_file shared config variable, - * and get the OIDC token from filename. - * It will also read IAM role to be assumed from AWS_ROLE_ARN - * environment variable or role_arn shared config variable. - * This provider gets credetials using the {AWS.STS.assumeRoleWithWebIdentity} - * service operation - * - * @!macro nobrowser - */ -AWS.TokenFileWebIdentityCredentials = AWS.util.inherit(AWS.Credentials, { - - /** - * @example Creating a new credentials object - * AWS.config.credentials = new AWS.TokenFileWebIdentityCredentials( - * // optionally provide configuration to apply to the underlying AWS.STS service client - * // if configuration is not provided, then configuration will be pulled from AWS.config - * { - * // specify timeout options - * httpOptions: { - * timeout: 100 - * } - * }); - * @see AWS.Config - */ - constructor: function TokenFileWebIdentityCredentials(clientConfig) { - AWS.Credentials.call(this); - this.data = null; - this.clientConfig = AWS.util.copy(clientConfig || {}); - }, +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.loadSsoSessionData = void 0; +const getConfigFilepath_1 = __nccwpck_require__(47237); +const getSsoSessionData_1 = __nccwpck_require__(82820); +const parseIni_1 = __nccwpck_require__(54262); +const slurpFile_1 = __nccwpck_require__(19155); +const swallowError = () => ({}); +const loadSsoSessionData = async (init = {}) => { + var _a; + return (0, slurpFile_1.slurpFile)((_a = init.configFilepath) !== null && _a !== void 0 ? 
_a : (0, getConfigFilepath_1.getConfigFilepath)()) + .then(parseIni_1.parseIni) + .then(getSsoSessionData_1.getSsoSessionData) + .catch(swallowError); +}; +exports.loadSsoSessionData = loadSsoSessionData; - /** - * Returns params from environment variables - * - * @api private - */ - getParamsFromEnv: function getParamsFromEnv() { - var ENV_TOKEN_FILE = 'AWS_WEB_IDENTITY_TOKEN_FILE', - ENV_ROLE_ARN = 'AWS_ROLE_ARN'; - if (process.env[ENV_TOKEN_FILE] && process.env[ENV_ROLE_ARN]) { - return [{ - envTokenFile: process.env[ENV_TOKEN_FILE], - roleArn: process.env[ENV_ROLE_ARN], - roleSessionName: process.env['AWS_ROLE_SESSION_NAME'] - }]; - } - }, - /** - * Returns params from shared config variables - * - * @api private - */ - getParamsFromSharedConfig: function getParamsFromSharedConfig() { - var profiles = AWS.util.getProfilesFromSharedConfig(iniLoader); - var profileName = process.env.AWS_PROFILE || AWS.util.defaultProfile; - var profile = profiles[profileName] || {}; - - if (Object.keys(profile).length === 0) { - throw AWS.util.error( - new Error('Profile ' + profileName + ' not found'), - { code: 'TokenFileWebIdentityCredentialsProviderFailure' } - ); - } - - var paramsArray = []; - - while (!profile['web_identity_token_file'] && profile['source_profile']) { - paramsArray.unshift({ - roleArn: profile['role_arn'], - roleSessionName: profile['role_session_name'] - }); - var sourceProfile = profile['source_profile']; - profile = profiles[sourceProfile]; - } - - paramsArray.unshift({ - envTokenFile: profile['web_identity_token_file'], - roleArn: profile['role_arn'], - roleSessionName: profile['role_session_name'] - }); +/***/ }), - return paramsArray; - }, +/***/ 19447: +/***/ ((__unused_webpack_module, exports) => { - /** - * Refreshes credentials using {AWS.STS.assumeRoleWithWebIdentity} - * - * @callback callback function(err) - * Called when the STS service responds (or fails). When - * this callback is called with no error, it means that the credentials - * information has been loaded into the object (as the `accessKeyId`, - * `secretAccessKey`, and `sessionToken` properties). 
- * @param err [Error] if an error occurred, this value will be filled - * @see AWS.Credentials.get - */ - refresh: function refresh(callback) { - this.coalesceRefresh(callback || AWS.util.fn.callback); - }, +"use strict"; - /** - * @api private - */ - assumeRoleChaining: function assumeRoleChaining(paramsArray, callback) { - var self = this; - if (paramsArray.length === 0) { - self.service.credentialsFrom(self.data, self); - callback(); - } else { - var params = paramsArray.shift(); - self.service.config.credentials = self.service.credentialsFrom(self.data, self); - self.service.assumeRole( - { - RoleArn: params.roleArn, - RoleSessionName: params.roleSessionName || 'token-file-web-identity' - }, - function (err, data) { - self.data = null; - if (err) { - callback(err); - } else { - self.data = data; - self.assumeRoleChaining(paramsArray, callback); - } +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.mergeConfigFiles = void 0; +const mergeConfigFiles = (...files) => { + const merged = {}; + for (const file of files) { + for (const [key, values] of Object.entries(file)) { + if (merged[key] !== undefined) { + Object.assign(merged[key], values); + } + else { + merged[key] = values; + } } - ); } - }, + return merged; +}; +exports.mergeConfigFiles = mergeConfigFiles; - /** - * @api private - */ - load: function load(callback) { - var self = this; - try { - var paramsArray = self.getParamsFromEnv(); - if (!paramsArray) { - paramsArray = self.getParamsFromSharedConfig(); - } - if (paramsArray) { - var params = paramsArray.shift(); - var oidcToken = fs.readFileSync(params.envTokenFile, {encoding: 'ascii'}); - if (!self.service) { - self.createClients(); - } - self.service.assumeRoleWithWebIdentity( - { - WebIdentityToken: oidcToken, - RoleArn: params.roleArn, - RoleSessionName: params.roleSessionName || 'token-file-web-identity' - }, - function (err, data) { - self.data = null; - if (err) { - callback(err); - } else { - self.data = data; - self.assumeRoleChaining(paramsArray, callback); - } - } - ); - } - } catch (err) { - callback(err); - } - }, - /** - * @api private - */ - createClients: function() { - if (!this.service) { - var stsConfig = AWS.util.merge({}, this.clientConfig); - this.service = new STS(stsConfig); - - // Retry in case of IDPCommunicationErrorException or InvalidIdentityToken - this.service.retryableError = function(error) { - if (error.code === 'IDPCommunicationErrorException' || error.code === 'InvalidIdentityToken') { - return true; - } else { - return AWS.Service.prototype.retryableError.call(this, error); +/***/ }), + +/***/ 54262: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.parseIni = void 0; +const profileNameBlockList = ["__proto__", "profile __proto__"]; +const parseIni = (iniData) => { + const map = {}; + let currentSection; + for (let line of iniData.split(/\r?\n/)) { + line = line.split(/(^|\s)[;#]/)[0].trim(); + const isSection = line[0] === "[" && line[line.length - 1] === "]"; + if (isSection) { + currentSection = line.substring(1, line.length - 1); + if (profileNameBlockList.includes(currentSection)) { + throw new Error(`Found invalid profile name "${currentSection}"`); + } + } + else if (currentSection) { + const indexOfEqualsSign = line.indexOf("="); + const start = 0; + const end = line.length - 1; + const isAssignment = indexOfEqualsSign !== -1 && indexOfEqualsSign !== start && indexOfEqualsSign !== end; + if (isAssignment) { + const 
[name, value] = [ + line.substring(0, indexOfEqualsSign).trim(), + line.substring(indexOfEqualsSign + 1).trim(), + ]; + map[currentSection] = map[currentSection] || {}; + map[currentSection][name] = value; + } } - }; } - } -}); + return map; +}; +exports.parseIni = parseIni; /***/ }), -/***/ 74998: -/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 2546: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -var AWS = __nccwpck_require__(28437); -var STS = __nccwpck_require__(57513); +"use strict"; -/** - * Represents credentials retrieved from STS Web Identity Federation support. - * - * By default this provider gets credentials using the - * {AWS.STS.assumeRoleWithWebIdentity} service operation. This operation - * requires a `RoleArn` containing the ARN of the IAM trust policy for the - * application for which credentials will be given. In addition, the - * `WebIdentityToken` must be set to the token provided by the identity - * provider. See {constructor} for an example on creating a credentials - * object with proper `RoleArn` and `WebIdentityToken` values. - * - * ## Refreshing Credentials from Identity Service - * - * In addition to AWS credentials expiring after a given amount of time, the - * login token from the identity provider will also expire. Once this token - * expires, it will not be usable to refresh AWS credentials, and another - * token will be needed. The SDK does not manage refreshing of the token value, - * but this can be done through a "refresh token" supported by most identity - * providers. Consult the documentation for the identity provider for refreshing - * tokens. Once the refreshed token is acquired, you should make sure to update - * this new token in the credentials object's {params} property. The following - * code will update the WebIdentityToken, assuming you have retrieved an updated - * token from the identity provider: - * - * ```javascript - * AWS.config.credentials.params.WebIdentityToken = updatedToken; - * ``` - * - * Future calls to `credentials.refresh()` will now use the new token. - * - * @!attribute params - * @return [map] the map of params passed to - * {AWS.STS.assumeRoleWithWebIdentity}. To update the token, set the - * `params.WebIdentityToken` property. - * @!attribute data - * @return [map] the raw data response from the call to - * {AWS.STS.assumeRoleWithWebIdentity}. Use this if you want to get - * access to other properties from the response. - */ -AWS.WebIdentityCredentials = AWS.util.inherit(AWS.Credentials, { - /** - * Creates a new credentials object. 
- * @param (see AWS.STS.assumeRoleWithWebIdentity) - * @example Creating a new credentials object - * AWS.config.credentials = new AWS.WebIdentityCredentials({ - * RoleArn: 'arn:aws:iam::1234567890:role/WebIdentity', - * WebIdentityToken: 'ABCDEFGHIJKLMNOP', // token from identity service - * RoleSessionName: 'web' // optional name, defaults to web-identity - * }, { - * // optionally provide configuration to apply to the underlying AWS.STS service client - * // if configuration is not provided, then configuration will be pulled from AWS.config - * - * // specify timeout options - * httpOptions: { - * timeout: 100 - * } - * }); - * @see AWS.STS.assumeRoleWithWebIdentity - * @see AWS.Config - */ - constructor: function WebIdentityCredentials(params, clientConfig) { - AWS.Credentials.call(this); - this.expired = true; - this.params = params; - this.params.RoleSessionName = this.params.RoleSessionName || 'web-identity'; - this.data = null; - this._clientConfig = AWS.util.copy(clientConfig || {}); - }, +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.parseKnownFiles = void 0; +const loadSharedConfigFiles_1 = __nccwpck_require__(41879); +const mergeConfigFiles_1 = __nccwpck_require__(19447); +const parseKnownFiles = async (init) => { + const parsedFiles = await (0, loadSharedConfigFiles_1.loadSharedConfigFiles)(init); + return (0, mergeConfigFiles_1.mergeConfigFiles)(parsedFiles.configFile, parsedFiles.credentialsFile); +}; +exports.parseKnownFiles = parseKnownFiles; - /** - * Refreshes credentials using {AWS.STS.assumeRoleWithWebIdentity} - * - * @callback callback function(err) - * Called when the STS service responds (or fails). When - * this callback is called with no error, it means that the credentials - * information has been loaded into the object (as the `accessKeyId`, - * `secretAccessKey`, and `sessionToken` properties). - * @param err [Error] if an error occurred, this value will be filled - * @see get - */ - refresh: function refresh(callback) { - this.coalesceRefresh(callback || AWS.util.fn.callback); - }, - /** - * @api private - */ - load: function load(callback) { - var self = this; - self.createClients(); - self.service.assumeRoleWithWebIdentity(function (err, data) { - self.data = null; - if (!err) { - self.data = data; - self.service.credentialsFrom(data, self); - } - callback(err); - }); - }, +/***/ }), - /** - * @api private - */ - createClients: function() { - if (!this.service) { - var stsConfig = AWS.util.merge({}, this._clientConfig); - stsConfig.params = this.params; - this.service = new STS(stsConfig); - } - } +/***/ 19155: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -}); +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.slurpFile = void 0; +const fs_1 = __nccwpck_require__(57147); +const { readFile } = fs_1.promises; +const filePromisesHash = {}; +const slurpFile = (path, options) => { + if (!filePromisesHash[path] || (options === null || options === void 0 ? 
void 0 : options.ignoreCache)) { + filePromisesHash[path] = readFile(path, "utf8"); + } + return filePromisesHash[path]; +}; +exports.slurpFile = slurpFile; /***/ }), -/***/ 45313: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 63191: +/***/ ((__unused_webpack_module, exports) => { -var AWS = __nccwpck_require__(28437); -var util = __nccwpck_require__(77985); -var endpointDiscoveryEnabledEnvs = ['AWS_ENABLE_ENDPOINT_DISCOVERY', 'AWS_ENDPOINT_DISCOVERY_ENABLED']; +"use strict"; -/** - * Generate key (except resources and operation part) to index the endpoints in the cache - * If input shape has endpointdiscoveryid trait then use - * accessKey + operation + resources + region + service as cache key - * If input shape doesn't have endpointdiscoveryid trait then use - * accessKey + region + service as cache key - * @return [map] object with keys to index endpoints. - * @api private - */ -function getCacheKey(request) { - var service = request.service; - var api = service.api || {}; - var operations = api.operations; - var identifiers = {}; - if (service.config.region) { - identifiers.region = service.config.region; - } - if (api.serviceId) { - identifiers.serviceId = api.serviceId; - } - if (service.config.credentials.accessKeyId) { - identifiers.accessKeyId = service.config.credentials.accessKeyId; - } - return identifiers; -} +Object.defineProperty(exports, "__esModule", ({ value: true })); -/** - * Recursive helper for marshallCustomIdentifiers(). - * Looks for required string input members that have 'endpointdiscoveryid' trait. - * @api private - */ -function marshallCustomIdentifiersHelper(result, params, shape) { - if (!shape || params === undefined || params === null) return; - if (shape.type === 'structure' && shape.required && shape.required.length > 0) { - util.arrayEach(shape.required, function(name) { - var memberShape = shape.members[name]; - if (memberShape.endpointDiscoveryId === true) { - var locationName = memberShape.isLocationName ? memberShape.name : name; - result[locationName] = String(params[name]); - } else { - marshallCustomIdentifiersHelper(result, params[name], memberShape); - } - }); - } -} -/** - * Get custom identifiers for cache key. - * Identifies custom identifiers by checking each shape's `endpointDiscoveryId` trait. - * @param [object] request object - * @param [object] input shape of the given operation's api - * @api private - */ -function marshallCustomIdentifiers(request, shape) { - var identifiers = {}; - marshallCustomIdentifiersHelper(identifiers, request.params, shape); - return identifiers; -} +/***/ }), -/** - * Call endpoint discovery operation when it's optional. - * When endpoint is available in cache then use the cached endpoints. If endpoints - * are unavailable then use regional endpoints and call endpoint discovery operation - * asynchronously. This is turned off by default. - * @param [object] request object - * @api private - */ -function optionalDiscoverEndpoint(request) { - var service = request.service; - var api = service.api; - var operationModel = api.operations ? api.operations[request.operation] : undefined; - var inputShape = operationModel ? 
operationModel.input : undefined; - - var identifiers = marshallCustomIdentifiers(request, inputShape); - var cacheKey = getCacheKey(request); - if (Object.keys(identifiers).length > 0) { - cacheKey = util.update(cacheKey, identifiers); - if (operationModel) cacheKey.operation = operationModel.name; - } - var endpoints = AWS.endpointCache.get(cacheKey); - if (endpoints && endpoints.length === 1 && endpoints[0].Address === '') { - //endpoint operation is being made but response not yet received - //or endpoint operation just failed in 1 minute - return; - } else if (endpoints && endpoints.length > 0) { - //found endpoint record from cache - request.httpRequest.updateEndpoint(endpoints[0].Address); - } else { - //endpoint record not in cache or outdated. make discovery operation - var endpointRequest = service.makeRequest(api.endpointOperation, { - Operation: operationModel.name, - Identifiers: identifiers, - }); - addApiVersionHeader(endpointRequest); - endpointRequest.removeListener('validate', AWS.EventListeners.Core.VALIDATE_PARAMETERS); - endpointRequest.removeListener('retry', AWS.EventListeners.Core.RETRY_CHECK); - //put in a placeholder for endpoints already requested, prevent - //too much in-flight calls - AWS.endpointCache.put(cacheKey, [{ - Address: '', - CachePeriodInMinutes: 1 - }]); - endpointRequest.send(function(err, data) { - if (data && data.Endpoints) { - AWS.endpointCache.put(cacheKey, data.Endpoints); - } else if (err) { - AWS.endpointCache.put(cacheKey, [{ - Address: '', - CachePeriodInMinutes: 1 //not to make more endpoint operation in next 1 minute - }]); - } - }); - } -} +/***/ 39733: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -var requestQueue = {}; +"use strict"; -/** - * Call endpoint discovery operation when it's required. - * When endpoint is available in cache then use cached ones. If endpoints are - * unavailable then SDK should call endpoint operation then use returned new - * endpoint for the api call. SDK will automatically attempt to do endpoint - * discovery. This is turned off by default - * @param [object] request object - * @api private - */ -function requiredDiscoverEndpoint(request, done) { - var service = request.service; - var api = service.api; - var operationModel = api.operations ? api.operations[request.operation] : undefined; - var inputShape = operationModel ? 
operationModel.input : undefined; - - var identifiers = marshallCustomIdentifiers(request, inputShape); - var cacheKey = getCacheKey(request); - if (Object.keys(identifiers).length > 0) { - cacheKey = util.update(cacheKey, identifiers); - if (operationModel) cacheKey.operation = operationModel.name; - } - var cacheKeyStr = AWS.EndpointCache.getKeyString(cacheKey); - var endpoints = AWS.endpointCache.get(cacheKeyStr); //endpoint cache also accepts string keys - if (endpoints && endpoints.length === 1 && endpoints[0].Address === '') { - //endpoint operation is being made but response not yet received - //push request object to a pending queue - if (!requestQueue[cacheKeyStr]) requestQueue[cacheKeyStr] = []; - requestQueue[cacheKeyStr].push({request: request, callback: done}); - return; - } else if (endpoints && endpoints.length > 0) { - request.httpRequest.updateEndpoint(endpoints[0].Address); - done(); - } else { - var endpointRequest = service.makeRequest(api.endpointOperation, { - Operation: operationModel.name, - Identifiers: identifiers, - }); - endpointRequest.removeListener('validate', AWS.EventListeners.Core.VALIDATE_PARAMETERS); - addApiVersionHeader(endpointRequest); - - //put in a placeholder for endpoints already requested, prevent - //too much in-flight calls - AWS.endpointCache.put(cacheKeyStr, [{ - Address: '', - CachePeriodInMinutes: 60 //long-live cache - }]); - endpointRequest.send(function(err, data) { - if (err) { - request.response.error = util.error(err, { retryable: false }); - AWS.endpointCache.remove(cacheKey); - - //fail all the pending requests in batch - if (requestQueue[cacheKeyStr]) { - var pendingRequests = requestQueue[cacheKeyStr]; - util.arrayEach(pendingRequests, function(requestContext) { - requestContext.request.response.error = util.error(err, { retryable: false }); - requestContext.callback(); - }); - delete requestQueue[cacheKeyStr]; - } - } else if (data) { - AWS.endpointCache.put(cacheKeyStr, data.Endpoints); - request.httpRequest.updateEndpoint(data.Endpoints[0].Address); - - //update the endpoint for all the pending requests in batch - if (requestQueue[cacheKeyStr]) { - var pendingRequests = requestQueue[cacheKeyStr]; - util.arrayEach(pendingRequests, function(requestContext) { - requestContext.request.httpRequest.updateEndpoint(data.Endpoints[0].Address); - requestContext.callback(); - }); - delete requestQueue[cacheKeyStr]; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.SignatureV4 = void 0; +const eventstream_codec_1 = __nccwpck_require__(56459); +const util_hex_encoding_1 = __nccwpck_require__(45364); +const util_middleware_1 = __nccwpck_require__(2390); +const util_utf8_1 = __nccwpck_require__(41895); +const constants_1 = __nccwpck_require__(48644); +const credentialDerivation_1 = __nccwpck_require__(19623); +const getCanonicalHeaders_1 = __nccwpck_require__(51393); +const getCanonicalQuery_1 = __nccwpck_require__(33243); +const getPayloadHash_1 = __nccwpck_require__(48545); +const headerUtil_1 = __nccwpck_require__(62179); +const moveHeadersToQuery_1 = __nccwpck_require__(49828); +const prepareRequest_1 = __nccwpck_require__(60075); +const utilDate_1 = __nccwpck_require__(39299); +class SignatureV4 { + constructor({ applyChecksum, credentials, region, service, sha256, uriEscapePath = true, }) { + this.headerMarshaller = new eventstream_codec_1.HeaderMarshaller(util_utf8_1.toUtf8, util_utf8_1.fromUtf8); + this.service = service; + this.sha256 = sha256; + this.uriEscapePath = uriEscapePath; + this.applyChecksum = 
typeof applyChecksum === "boolean" ? applyChecksum : true; + this.regionProvider = (0, util_middleware_1.normalizeProvider)(region); + this.credentialProvider = (0, util_middleware_1.normalizeProvider)(credentials); + } + async presign(originalRequest, options = {}) { + const { signingDate = new Date(), expiresIn = 3600, unsignableHeaders, unhoistableHeaders, signableHeaders, signingRegion, signingService, } = options; + const credentials = await this.credentialProvider(); + this.validateResolvedCredentials(credentials); + const region = signingRegion !== null && signingRegion !== void 0 ? signingRegion : (await this.regionProvider()); + const { longDate, shortDate } = formatDate(signingDate); + if (expiresIn > constants_1.MAX_PRESIGNED_TTL) { + return Promise.reject("Signature version 4 presigned URLs" + " must have an expiration date less than one week in" + " the future"); + } + const scope = (0, credentialDerivation_1.createScope)(shortDate, region, signingService !== null && signingService !== void 0 ? signingService : this.service); + const request = (0, moveHeadersToQuery_1.moveHeadersToQuery)((0, prepareRequest_1.prepareRequest)(originalRequest), { unhoistableHeaders }); + if (credentials.sessionToken) { + request.query[constants_1.TOKEN_QUERY_PARAM] = credentials.sessionToken; + } + request.query[constants_1.ALGORITHM_QUERY_PARAM] = constants_1.ALGORITHM_IDENTIFIER; + request.query[constants_1.CREDENTIAL_QUERY_PARAM] = `${credentials.accessKeyId}/${scope}`; + request.query[constants_1.AMZ_DATE_QUERY_PARAM] = longDate; + request.query[constants_1.EXPIRES_QUERY_PARAM] = expiresIn.toString(10); + const canonicalHeaders = (0, getCanonicalHeaders_1.getCanonicalHeaders)(request, unsignableHeaders, signableHeaders); + request.query[constants_1.SIGNED_HEADERS_QUERY_PARAM] = getCanonicalHeaderList(canonicalHeaders); + request.query[constants_1.SIGNATURE_QUERY_PARAM] = await this.getSignature(longDate, scope, this.getSigningKey(credentials, region, shortDate, signingService), this.createCanonicalRequest(request, canonicalHeaders, await (0, getPayloadHash_1.getPayloadHash)(originalRequest, this.sha256))); + return request; + } + async sign(toSign, options) { + if (typeof toSign === "string") { + return this.signString(toSign, options); + } + else if (toSign.headers && toSign.payload) { + return this.signEvent(toSign, options); + } + else if (toSign.message) { + return this.signMessage(toSign, options); } - } - done(); - }); - } + else { + return this.signRequest(toSign, options); + } + } + async signEvent({ headers, payload }, { signingDate = new Date(), priorSignature, signingRegion, signingService }) { + const region = signingRegion !== null && signingRegion !== void 0 ? signingRegion : (await this.regionProvider()); + const { shortDate, longDate } = formatDate(signingDate); + const scope = (0, credentialDerivation_1.createScope)(shortDate, region, signingService !== null && signingService !== void 0 ? 
signingService : this.service); + const hashedPayload = await (0, getPayloadHash_1.getPayloadHash)({ headers: {}, body: payload }, this.sha256); + const hash = new this.sha256(); + hash.update(headers); + const hashedHeaders = (0, util_hex_encoding_1.toHex)(await hash.digest()); + const stringToSign = [ + constants_1.EVENT_ALGORITHM_IDENTIFIER, + longDate, + scope, + priorSignature, + hashedHeaders, + hashedPayload, + ].join("\n"); + return this.signString(stringToSign, { signingDate, signingRegion: region, signingService }); + } + async signMessage(signableMessage, { signingDate = new Date(), signingRegion, signingService }) { + const promise = this.signEvent({ + headers: this.headerMarshaller.format(signableMessage.message.headers), + payload: signableMessage.message.body, + }, { + signingDate, + signingRegion, + signingService, + priorSignature: signableMessage.priorSignature, + }); + return promise.then((signature) => { + return { message: signableMessage.message, signature }; + }); + } + async signString(stringToSign, { signingDate = new Date(), signingRegion, signingService } = {}) { + const credentials = await this.credentialProvider(); + this.validateResolvedCredentials(credentials); + const region = signingRegion !== null && signingRegion !== void 0 ? signingRegion : (await this.regionProvider()); + const { shortDate } = formatDate(signingDate); + const hash = new this.sha256(await this.getSigningKey(credentials, region, shortDate, signingService)); + hash.update((0, util_utf8_1.toUint8Array)(stringToSign)); + return (0, util_hex_encoding_1.toHex)(await hash.digest()); + } + async signRequest(requestToSign, { signingDate = new Date(), signableHeaders, unsignableHeaders, signingRegion, signingService, } = {}) { + const credentials = await this.credentialProvider(); + this.validateResolvedCredentials(credentials); + const region = signingRegion !== null && signingRegion !== void 0 ? signingRegion : (await this.regionProvider()); + const request = (0, prepareRequest_1.prepareRequest)(requestToSign); + const { longDate, shortDate } = formatDate(signingDate); + const scope = (0, credentialDerivation_1.createScope)(shortDate, region, signingService !== null && signingService !== void 0 ? 
signingService : this.service); + request.headers[constants_1.AMZ_DATE_HEADER] = longDate; + if (credentials.sessionToken) { + request.headers[constants_1.TOKEN_HEADER] = credentials.sessionToken; + } + const payloadHash = await (0, getPayloadHash_1.getPayloadHash)(request, this.sha256); + if (!(0, headerUtil_1.hasHeader)(constants_1.SHA256_HEADER, request.headers) && this.applyChecksum) { + request.headers[constants_1.SHA256_HEADER] = payloadHash; + } + const canonicalHeaders = (0, getCanonicalHeaders_1.getCanonicalHeaders)(request, unsignableHeaders, signableHeaders); + const signature = await this.getSignature(longDate, scope, this.getSigningKey(credentials, region, shortDate, signingService), this.createCanonicalRequest(request, canonicalHeaders, payloadHash)); + request.headers[constants_1.AUTH_HEADER] = + `${constants_1.ALGORITHM_IDENTIFIER} ` + + `Credential=${credentials.accessKeyId}/${scope}, ` + + `SignedHeaders=${getCanonicalHeaderList(canonicalHeaders)}, ` + + `Signature=${signature}`; + return request; + } + createCanonicalRequest(request, canonicalHeaders, payloadHash) { + const sortedHeaders = Object.keys(canonicalHeaders).sort(); + return `${request.method} +${this.getCanonicalPath(request)} +${(0, getCanonicalQuery_1.getCanonicalQuery)(request)} +${sortedHeaders.map((name) => `${name}:${canonicalHeaders[name]}`).join("\n")} + +${sortedHeaders.join(";")} +${payloadHash}`; + } + async createStringToSign(longDate, credentialScope, canonicalRequest) { + const hash = new this.sha256(); + hash.update((0, util_utf8_1.toUint8Array)(canonicalRequest)); + const hashedRequest = await hash.digest(); + return `${constants_1.ALGORITHM_IDENTIFIER} +${longDate} +${credentialScope} +${(0, util_hex_encoding_1.toHex)(hashedRequest)}`; + } + getCanonicalPath({ path }) { + if (this.uriEscapePath) { + const normalizedPathSegments = []; + for (const pathSegment of path.split("/")) { + if ((pathSegment === null || pathSegment === void 0 ? void 0 : pathSegment.length) === 0) + continue; + if (pathSegment === ".") + continue; + if (pathSegment === "..") { + normalizedPathSegments.pop(); + } + else { + normalizedPathSegments.push(pathSegment); + } + } + const normalizedPath = `${(path === null || path === void 0 ? void 0 : path.startsWith("/")) ? "/" : ""}${normalizedPathSegments.join("/")}${normalizedPathSegments.length > 0 && (path === null || path === void 0 ? void 0 : path.endsWith("/")) ? 
"/" : ""}`; + const doubleEncoded = encodeURIComponent(normalizedPath); + return doubleEncoded.replace(/%2F/g, "/"); + } + return path; + } + async getSignature(longDate, credentialScope, keyPromise, canonicalRequest) { + const stringToSign = await this.createStringToSign(longDate, credentialScope, canonicalRequest); + const hash = new this.sha256(await keyPromise); + hash.update((0, util_utf8_1.toUint8Array)(stringToSign)); + return (0, util_hex_encoding_1.toHex)(await hash.digest()); + } + getSigningKey(credentials, region, shortDate, service) { + return (0, credentialDerivation_1.getSigningKey)(this.sha256, credentials, shortDate, region, service || this.service); + } + validateResolvedCredentials(credentials) { + if (typeof credentials !== "object" || + typeof credentials.accessKeyId !== "string" || + typeof credentials.secretAccessKey !== "string") { + throw new Error("Resolved credential object is not valid"); + } + } } +exports.SignatureV4 = SignatureV4; +const formatDate = (now) => { + const longDate = (0, utilDate_1.iso8601)(now).replace(/[\-:]/g, ""); + return { + longDate, + shortDate: longDate.slice(0, 8), + }; +}; +const getCanonicalHeaderList = (headers) => Object.keys(headers).sort().join(";"); -/** - * add api version header to endpoint operation - * @api private - */ -function addApiVersionHeader(endpointRequest) { - var api = endpointRequest.service.api; - var apiVersion = api.apiVersion; - if (apiVersion && !endpointRequest.httpRequest.headers['x-amz-api-version']) { - endpointRequest.httpRequest.headers['x-amz-api-version'] = apiVersion; - } -} -/** - * If api call gets invalid endpoint exception, SDK should attempt to remove the invalid - * endpoint from cache. - * @api private - */ -function invalidateCachedEndpoints(response) { - var error = response.error; - var httpResponse = response.httpResponse; - if (error && - (error.code === 'InvalidEndpointException' || httpResponse.statusCode === 421) - ) { - var request = response.request; - var operations = request.service.api.operations || {}; - var inputShape = operations[request.operation] ? operations[request.operation].input : undefined; - var identifiers = marshallCustomIdentifiers(request, inputShape); - var cacheKey = getCacheKey(request); - if (Object.keys(identifiers).length > 0) { - cacheKey = util.update(cacheKey, identifiers); - if (operations[request.operation]) cacheKey.operation = operations[request.operation].name; - } - AWS.endpointCache.remove(cacheKey); - } -} +/***/ }), -/** - * If endpoint is explicitly configured, SDK should not do endpoint discovery in anytime. - * @param [object] client Service client object. - * @api private - */ -function hasCustomEndpoint(client) { - //if set endpoint is set for specific client, enable endpoint discovery will raise an error. - if (client._originalConfig && client._originalConfig.endpoint && client._originalConfig.endpointDiscoveryEnabled === true) { - throw util.error(new Error(), { - code: 'ConfigurationException', - message: 'Custom endpoint is supplied; endpointDiscoveryEnabled must not be true.' 
- }); - }; - var svcConfig = AWS.config[client.serviceIdentifier] || {}; - return Boolean(AWS.config.endpoint || svcConfig.endpoint || (client._originalConfig && client._originalConfig.endpoint)); -} +/***/ 69098: +/***/ ((__unused_webpack_module, exports) => { -/** - * @api private - */ -function isFalsy(value) { - return ['false', '0'].indexOf(value) >= 0; -} +"use strict"; -/** - * If endpoint discovery should perform for this request when no operation requires endpoint - * discovery for the given service. - * SDK performs config resolution in order like below: - * 1. If set in client configuration. - * 2. If set in env AWS_ENABLE_ENDPOINT_DISCOVERY. - * 3. If set in shared ini config file with key 'endpoint_discovery_enabled'. - * @param [object] request request object. - * @returns [boolean|undefined] if endpoint discovery config is not set in any source, this - * function returns undefined - * @api private - */ -function resolveEndpointDiscoveryConfig(request) { - var service = request.service || {}; - if (service.config.endpointDiscoveryEnabled !== undefined) { - return service.config.endpointDiscoveryEnabled; - } +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.cloneQuery = exports.cloneRequest = void 0; +const cloneRequest = ({ headers, query, ...rest }) => ({ + ...rest, + headers: { ...headers }, + query: query ? (0, exports.cloneQuery)(query) : undefined, +}); +exports.cloneRequest = cloneRequest; +const cloneQuery = (query) => Object.keys(query).reduce((carry, paramName) => { + const param = query[paramName]; + return { + ...carry, + [paramName]: Array.isArray(param) ? [...param] : param, + }; +}, {}); +exports.cloneQuery = cloneQuery; - //shared ini file is only available in Node - //not to check env in browser - if (util.isBrowser()) return undefined; - - // If any of recognized endpoint discovery config env is set - for (var i = 0; i < endpointDiscoveryEnabledEnvs.length; i++) { - var env = endpointDiscoveryEnabledEnvs[i]; - if (Object.prototype.hasOwnProperty.call(process.env, env)) { - if (process.env[env] === '' || process.env[env] === undefined) { - throw util.error(new Error(), { - code: 'ConfigurationException', - message: 'environmental variable ' + env + ' cannot be set to nothing' - }); - } - return !isFalsy(process.env[env]); - } - } - var configFile = {}; - try { - configFile = AWS.util.iniLoader ? AWS.util.iniLoader.loadFrom({ - isConfig: true, - filename: process.env[AWS.util.sharedConfigFileEnv] - }) : {}; - } catch (e) {} - var sharedFileConfig = configFile[ - process.env.AWS_PROFILE || AWS.util.defaultProfile - ] || {}; - if (Object.prototype.hasOwnProperty.call(sharedFileConfig, 'endpoint_discovery_enabled')) { - if (sharedFileConfig.endpoint_discovery_enabled === undefined) { - throw util.error(new Error(), { - code: 'ConfigurationException', - message: 'config file entry \'endpoint_discovery_enabled\' cannot be set to nothing' - }); - } - return !isFalsy(sharedFileConfig.endpoint_discovery_enabled); - } - return undefined; -} +/***/ }), -/** - * attach endpoint discovery logic to request object - * @param [object] request - * @api private - */ -function discoverEndpoint(request, done) { - var service = request.service || {}; - if (hasCustomEndpoint(service) || request.isPresigned()) return done(); - - var operations = service.api.operations || {}; - var operationModel = operations[request.operation]; - var isEndpointDiscoveryRequired = operationModel ? 
operationModel.endpointDiscoveryRequired : 'NULL'; - var isEnabled = resolveEndpointDiscoveryConfig(request); - var hasRequiredEndpointDiscovery = service.api.hasRequiredEndpointDiscovery; - if (isEnabled || hasRequiredEndpointDiscovery) { - // Once a customer enables endpoint discovery, the SDK should start appending - // the string endpoint-discovery to the user-agent on all requests. - request.httpRequest.appendToUserAgent('endpoint-discovery'); - } - switch (isEndpointDiscoveryRequired) { - case 'OPTIONAL': - if (isEnabled || hasRequiredEndpointDiscovery) { - // For a given service; if at least one operation requires endpoint discovery then the SDK must enable endpoint discovery - // by default for all operations of that service, including operations where endpoint discovery is optional. - optionalDiscoverEndpoint(request); - request.addNamedListener('INVALIDATE_CACHED_ENDPOINTS', 'extractError', invalidateCachedEndpoints); - } - done(); - break; - case 'REQUIRED': - if (isEnabled === false) { - // For a given operation; if endpoint discovery is required and it has been disabled on the SDK client, - // then the SDK must return a clear and actionable exception. - request.response.error = util.error(new Error(), { - code: 'ConfigurationException', - message: 'Endpoint Discovery is disabled but ' + service.api.className + '.' + request.operation + - '() requires it. Please check your configurations.' - }); - done(); - break; - } - request.addNamedListener('INVALIDATE_CACHED_ENDPOINTS', 'extractError', invalidateCachedEndpoints); - requiredDiscoverEndpoint(request, done); - break; - case 'NULL': - default: - done(); - break; - } -} +/***/ 48644: +/***/ ((__unused_webpack_module, exports) => { -module.exports = { - discoverEndpoint: discoverEndpoint, - requiredDiscoverEndpoint: requiredDiscoverEndpoint, - optionalDiscoverEndpoint: optionalDiscoverEndpoint, - marshallCustomIdentifiers: marshallCustomIdentifiers, - getCacheKey: getCacheKey, - invalidateCachedEndpoint: invalidateCachedEndpoints, +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.MAX_PRESIGNED_TTL = exports.KEY_TYPE_IDENTIFIER = exports.MAX_CACHE_SIZE = exports.UNSIGNED_PAYLOAD = exports.EVENT_ALGORITHM_IDENTIFIER = exports.ALGORITHM_IDENTIFIER_V4A = exports.ALGORITHM_IDENTIFIER = exports.UNSIGNABLE_PATTERNS = exports.SEC_HEADER_PATTERN = exports.PROXY_HEADER_PATTERN = exports.ALWAYS_UNSIGNABLE_HEADERS = exports.HOST_HEADER = exports.TOKEN_HEADER = exports.SHA256_HEADER = exports.SIGNATURE_HEADER = exports.GENERATED_HEADERS = exports.DATE_HEADER = exports.AMZ_DATE_HEADER = exports.AUTH_HEADER = exports.REGION_SET_PARAM = exports.TOKEN_QUERY_PARAM = exports.SIGNATURE_QUERY_PARAM = exports.EXPIRES_QUERY_PARAM = exports.SIGNED_HEADERS_QUERY_PARAM = exports.AMZ_DATE_QUERY_PARAM = exports.CREDENTIAL_QUERY_PARAM = exports.ALGORITHM_QUERY_PARAM = void 0; +exports.ALGORITHM_QUERY_PARAM = "X-Amz-Algorithm"; +exports.CREDENTIAL_QUERY_PARAM = "X-Amz-Credential"; +exports.AMZ_DATE_QUERY_PARAM = "X-Amz-Date"; +exports.SIGNED_HEADERS_QUERY_PARAM = "X-Amz-SignedHeaders"; +exports.EXPIRES_QUERY_PARAM = "X-Amz-Expires"; +exports.SIGNATURE_QUERY_PARAM = "X-Amz-Signature"; +exports.TOKEN_QUERY_PARAM = "X-Amz-Security-Token"; +exports.REGION_SET_PARAM = "X-Amz-Region-Set"; +exports.AUTH_HEADER = "authorization"; +exports.AMZ_DATE_HEADER = exports.AMZ_DATE_QUERY_PARAM.toLowerCase(); +exports.DATE_HEADER = "date"; +exports.GENERATED_HEADERS = [exports.AUTH_HEADER, exports.AMZ_DATE_HEADER, 
exports.DATE_HEADER]; +exports.SIGNATURE_HEADER = exports.SIGNATURE_QUERY_PARAM.toLowerCase(); +exports.SHA256_HEADER = "x-amz-content-sha256"; +exports.TOKEN_HEADER = exports.TOKEN_QUERY_PARAM.toLowerCase(); +exports.HOST_HEADER = "host"; +exports.ALWAYS_UNSIGNABLE_HEADERS = { + authorization: true, + "cache-control": true, + connection: true, + expect: true, + from: true, + "keep-alive": true, + "max-forwards": true, + pragma: true, + referer: true, + te: true, + trailer: true, + "transfer-encoding": true, + upgrade: true, + "user-agent": true, + "x-amzn-trace-id": true, }; +exports.PROXY_HEADER_PATTERN = /^proxy-/; +exports.SEC_HEADER_PATTERN = /^sec-/; +exports.UNSIGNABLE_PATTERNS = [/^proxy-/i, /^sec-/i]; +exports.ALGORITHM_IDENTIFIER = "AWS4-HMAC-SHA256"; +exports.ALGORITHM_IDENTIFIER_V4A = "AWS4-ECDSA-P256-SHA256"; +exports.EVENT_ALGORITHM_IDENTIFIER = "AWS4-HMAC-SHA256-PAYLOAD"; +exports.UNSIGNED_PAYLOAD = "UNSIGNED-PAYLOAD"; +exports.MAX_CACHE_SIZE = 50; +exports.KEY_TYPE_IDENTIFIER = "aws4_request"; +exports.MAX_PRESIGNED_TTL = 60 * 60 * 24 * 7; /***/ }), -/***/ 76663: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var AWS = __nccwpck_require__(28437); -var util = AWS.util; -var typeOf = (__nccwpck_require__(48084).typeOf); -var DynamoDBSet = __nccwpck_require__(20304); -var NumberValue = __nccwpck_require__(91593); - -AWS.DynamoDB.Converter = { - /** - * Convert a JavaScript value to its equivalent DynamoDB AttributeValue type - * - * @param data [any] The data to convert to a DynamoDB AttributeValue - * @param options [map] - * @option options convertEmptyValues [Boolean] Whether to automatically - * convert empty strings, blobs, - * and sets to `null` - * @option options wrapNumbers [Boolean] Whether to return numbers as a - * NumberValue object instead of - * converting them to native JavaScript - * numbers. This allows for the safe - * round-trip transport of numbers of - * arbitrary size. - * @return [map] An object in the Amazon DynamoDB AttributeValue format - * - * @see AWS.DynamoDB.Converter.marshall AWS.DynamoDB.Converter.marshall to - * convert entire records (rather than individual attributes) - */ - input: function convertInput(data, options) { - options = options || {}; - var type = typeOf(data); - if (type === 'Object') { - return formatMap(data, options); - } else if (type === 'Array') { - return formatList(data, options); - } else if (type === 'Set') { - return formatSet(data, options); - } else if (type === 'String') { - if (data.length === 0 && options.convertEmptyValues) { - return convertInput(null); - } - return { S: data }; - } else if (type === 'Number' || type === 'NumberValue') { - return { N: data.toString() }; - } else if (type === 'Binary') { - if (data.length === 0 && options.convertEmptyValues) { - return convertInput(null); - } - return { B: data }; - } else if (type === 'Boolean') { - return { BOOL: data }; - } else if (type === 'null') { - return { NULL: true }; - } else if (type !== 'undefined' && type !== 'Function') { - // this value has a custom constructor - return formatMap(data, options); - } - }, - - /** - * Convert a JavaScript object into a DynamoDB record. 
- * - * @param data [any] The data to convert to a DynamoDB record - * @param options [map] - * @option options convertEmptyValues [Boolean] Whether to automatically - * convert empty strings, blobs, - * and sets to `null` - * @option options wrapNumbers [Boolean] Whether to return numbers as a - * NumberValue object instead of - * converting them to native JavaScript - * numbers. This allows for the safe - * round-trip transport of numbers of - * arbitrary size. - * - * @return [map] An object in the DynamoDB record format. - * - * @example Convert a JavaScript object into a DynamoDB record - * var marshalled = AWS.DynamoDB.Converter.marshall({ - * string: 'foo', - * list: ['fizz', 'buzz', 'pop'], - * map: { - * nestedMap: { - * key: 'value', - * } - * }, - * number: 123, - * nullValue: null, - * boolValue: true, - * stringSet: new DynamoDBSet(['foo', 'bar', 'baz']) - * }); - */ - marshall: function marshallItem(data, options) { - return AWS.DynamoDB.Converter.input(data, options).M; - }, +/***/ 19623: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - /** - * Convert a DynamoDB AttributeValue object to its equivalent JavaScript type. - * - * @param data [map] An object in the Amazon DynamoDB AttributeValue format - * @param options [map] - * @option options convertEmptyValues [Boolean] Whether to automatically - * convert empty strings, blobs, - * and sets to `null` - * @option options wrapNumbers [Boolean] Whether to return numbers as a - * NumberValue object instead of - * converting them to native JavaScript - * numbers. This allows for the safe - * round-trip transport of numbers of - * arbitrary size. - * - * @return [Object|Array|String|Number|Boolean|null] - * - * @see AWS.DynamoDB.Converter.unmarshall AWS.DynamoDB.Converter.unmarshall to - * convert entire records (rather than individual attributes) - */ - output: function convertOutput(data, options) { - options = options || {}; - var list, map, i; - for (var type in data) { - var values = data[type]; - if (type === 'M') { - map = {}; - for (var key in values) { - map[key] = convertOutput(values[key], options); - } - return map; - } else if (type === 'L') { - list = []; - for (i = 0; i < values.length; i++) { - list.push(convertOutput(values[i], options)); - } - return list; - } else if (type === 'SS') { - list = []; - for (i = 0; i < values.length; i++) { - list.push(values[i] + ''); - } - return new DynamoDBSet(list); - } else if (type === 'NS') { - list = []; - for (i = 0; i < values.length; i++) { - list.push(convertNumber(values[i], options.wrapNumbers)); - } - return new DynamoDBSet(list); - } else if (type === 'BS') { - list = []; - for (i = 0; i < values.length; i++) { - list.push(AWS.util.buffer.toBuffer(values[i])); - } - return new DynamoDBSet(list); - } else if (type === 'S') { - return values + ''; - } else if (type === 'N') { - return convertNumber(values, options.wrapNumbers); - } else if (type === 'B') { - return util.buffer.toBuffer(values); - } else if (type === 'BOOL') { - return (values === 'true' || values === 'TRUE' || values === true); - } else if (type === 'NULL') { - return null; - } - } - }, +"use strict"; - /** - * Convert a DynamoDB record into a JavaScript object. 
- * - * @param data [any] The DynamoDB record - * @param options [map] - * @option options convertEmptyValues [Boolean] Whether to automatically - * convert empty strings, blobs, - * and sets to `null` - * @option options wrapNumbers [Boolean] Whether to return numbers as a - * NumberValue object instead of - * converting them to native JavaScript - * numbers. This allows for the safe - * round-trip transport of numbers of - * arbitrary size. - * - * @return [map] An object whose properties have been converted from - * DynamoDB's AttributeValue format into their corresponding native - * JavaScript types. - * - * @example Convert a record received from a DynamoDB stream - * var unmarshalled = AWS.DynamoDB.Converter.unmarshall({ - * string: {S: 'foo'}, - * list: {L: [{S: 'fizz'}, {S: 'buzz'}, {S: 'pop'}]}, - * map: { - * M: { - * nestedMap: { - * M: { - * key: {S: 'value'} - * } - * } - * } - * }, - * number: {N: '123'}, - * nullValue: {NULL: true}, - * boolValue: {BOOL: true} - * }); - */ - unmarshall: function unmarshall(data, options) { - return AWS.DynamoDB.Converter.output({M: data}, options); - } +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.clearCredentialCache = exports.getSigningKey = exports.createScope = void 0; +const util_hex_encoding_1 = __nccwpck_require__(45364); +const util_utf8_1 = __nccwpck_require__(41895); +const constants_1 = __nccwpck_require__(48644); +const signingKeyCache = {}; +const cacheQueue = []; +const createScope = (shortDate, region, service) => `${shortDate}/${region}/${service}/${constants_1.KEY_TYPE_IDENTIFIER}`; +exports.createScope = createScope; +const getSigningKey = async (sha256Constructor, credentials, shortDate, region, service) => { + const credsHash = await hmac(sha256Constructor, credentials.secretAccessKey, credentials.accessKeyId); + const cacheKey = `${shortDate}:${region}:${service}:${(0, util_hex_encoding_1.toHex)(credsHash)}:${credentials.sessionToken}`; + if (cacheKey in signingKeyCache) { + return signingKeyCache[cacheKey]; + } + cacheQueue.push(cacheKey); + while (cacheQueue.length > constants_1.MAX_CACHE_SIZE) { + delete signingKeyCache[cacheQueue.shift()]; + } + let key = `AWS4${credentials.secretAccessKey}`; + for (const signable of [shortDate, region, service, constants_1.KEY_TYPE_IDENTIFIER]) { + key = await hmac(sha256Constructor, key, signable); + } + return (signingKeyCache[cacheKey] = key); +}; +exports.getSigningKey = getSigningKey; +const clearCredentialCache = () => { + cacheQueue.length = 0; + Object.keys(signingKeyCache).forEach((cacheKey) => { + delete signingKeyCache[cacheKey]; + }); +}; +exports.clearCredentialCache = clearCredentialCache; +const hmac = (ctor, secret, data) => { + const hash = new ctor(secret); + hash.update((0, util_utf8_1.toUint8Array)(data)); + return hash.digest(); }; -/** - * @api private - * @param data [Array] - * @param options [map] - */ -function formatList(data, options) { - var list = {L: []}; - for (var i = 0; i < data.length; i++) { - list['L'].push(AWS.DynamoDB.Converter.input(data[i], options)); - } - return list; -} - -/** - * @api private - * @param value [String] - * @param wrapNumbers [Boolean] - */ -function convertNumber(value, wrapNumbers) { - return wrapNumbers ? 
new NumberValue(value) : Number(value); -} -/** - * @api private - * @param data [map] - * @param options [map] - */ -function formatMap(data, options) { - var map = {M: {}}; - for (var key in data) { - var formatted = AWS.DynamoDB.Converter.input(data[key], options); - if (formatted !== void 0) { - map['M'][key] = formatted; - } - } - return map; -} +/***/ }), -/** - * @api private - */ -function formatSet(data, options) { - options = options || {}; - var values = data.values; - if (options.convertEmptyValues) { - values = filterEmptySetValues(data); - if (values.length === 0) { - return AWS.DynamoDB.Converter.input(null); - } - } +/***/ 51393: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - var map = {}; - switch (data.type) { - case 'String': map['SS'] = values; break; - case 'Binary': map['BS'] = values; break; - case 'Number': map['NS'] = values.map(function (value) { - return value.toString(); - }); - } - return map; -} +"use strict"; -/** - * @api private - */ -function filterEmptySetValues(set) { - var nonEmptyValues = []; - var potentiallyEmptyTypes = { - String: true, - Binary: true, - Number: false - }; - if (potentiallyEmptyTypes[set.type]) { - for (var i = 0; i < set.values.length; i++) { - if (set.values[i].length === 0) { +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getCanonicalHeaders = void 0; +const constants_1 = __nccwpck_require__(48644); +const getCanonicalHeaders = ({ headers }, unsignableHeaders, signableHeaders) => { + const canonical = {}; + for (const headerName of Object.keys(headers).sort()) { + if (headers[headerName] == undefined) { + continue; + } + const canonicalHeaderName = headerName.toLowerCase(); + if (canonicalHeaderName in constants_1.ALWAYS_UNSIGNABLE_HEADERS || + (unsignableHeaders === null || unsignableHeaders === void 0 ? void 0 : unsignableHeaders.has(canonicalHeaderName)) || + constants_1.PROXY_HEADER_PATTERN.test(canonicalHeaderName) || + constants_1.SEC_HEADER_PATTERN.test(canonicalHeaderName)) { + if (!signableHeaders || (signableHeaders && !signableHeaders.has(canonicalHeaderName))) { continue; } - nonEmptyValues.push(set.values[i]); } - - return nonEmptyValues; + canonical[canonicalHeaderName] = headers[headerName].trim().replace(/\s+/g, " "); } - - return set.values; -} - -/** - * @api private - */ -module.exports = AWS.DynamoDB.Converter; + return canonical; +}; +exports.getCanonicalHeaders = getCanonicalHeaders; /***/ }), -/***/ 90030: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var AWS = __nccwpck_require__(28437); -var Translator = __nccwpck_require__(34222); -var DynamoDBSet = __nccwpck_require__(20304); +/***/ 33243: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -/** - * The document client simplifies working with items in Amazon DynamoDB - * by abstracting away the notion of attribute values. This abstraction - * annotates native JavaScript types supplied as input parameters, as well - * as converts annotated response data to native JavaScript types. - * - * ## Marshalling Input and Unmarshalling Response Data - * - * The document client affords developers the use of native JavaScript types - * instead of `AttributeValue`s to simplify the JavaScript development - * experience with Amazon DynamoDB. JavaScript objects passed in as parameters - * are marshalled into `AttributeValue` shapes required by Amazon DynamoDB. - * Responses from DynamoDB are unmarshalled into plain JavaScript objects - * by the `DocumentClient`. 
The `DocumentClient`, does not accept - * `AttributeValue`s in favor of native JavaScript types. - * - * | JavaScript Type | DynamoDB AttributeValue | - * |:----------------------------------------------------------------------:|-------------------------| - * | String | S | - * | Number | N | - * | Boolean | BOOL | - * | null | NULL | - * | Array | L | - * | Object | M | - * | Buffer, File, Blob, ArrayBuffer, DataView, and JavaScript typed arrays | B | - * - * ## Support for Sets - * - * The `DocumentClient` offers a convenient way to create sets from - * JavaScript Arrays. The type of set is inferred from the first element - * in the array. DynamoDB supports string, number, and binary sets. To - * learn more about supported types see the - * [Amazon DynamoDB Data Model Documentation](http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/DataModel.html) - * For more information see {AWS.DynamoDB.DocumentClient.createSet} - * - */ -AWS.DynamoDB.DocumentClient = AWS.util.inherit({ - - /** - * Creates a DynamoDB document client with a set of configuration options. - * - * @option options params [map] An optional map of parameters to bind to every - * request sent by this service object. - * @option options service [AWS.DynamoDB] An optional pre-configured instance - * of the AWS.DynamoDB service object. This instance's config will be - * copied to a new instance used by this client. You should not need to - * retain a reference to the input object, and may destroy it or allow it - * to be garbage collected. - * @option options convertEmptyValues [Boolean] set to true if you would like - * the document client to convert empty values (0-length strings, binary - * buffers, and sets) to be converted to NULL types when persisting to - * DynamoDB. - * @option options wrapNumbers [Boolean] Set to true to return numbers as a - * NumberValue object instead of converting them to native JavaScript numbers. - * This allows for the safe round-trip transport of numbers of arbitrary size. 
- * @see AWS.DynamoDB.constructor - * - */ - constructor: function DocumentClient(options) { - var self = this; - self.options = options || {}; - self.configure(self.options); - }, +"use strict"; - /** - * @api private - */ - configure: function configure(options) { - var self = this; - self.service = options.service; - self.bindServiceObject(options); - self.attrValue = options.attrValue = - self.service.api.operations.putItem.input.members.Item.value.shape; - }, +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getCanonicalQuery = void 0; +const util_uri_escape_1 = __nccwpck_require__(54197); +const constants_1 = __nccwpck_require__(48644); +const getCanonicalQuery = ({ query = {} }) => { + const keys = []; + const serialized = {}; + for (const key of Object.keys(query).sort()) { + if (key.toLowerCase() === constants_1.SIGNATURE_HEADER) { + continue; + } + keys.push(key); + const value = query[key]; + if (typeof value === "string") { + serialized[key] = `${(0, util_uri_escape_1.escapeUri)(key)}=${(0, util_uri_escape_1.escapeUri)(value)}`; + } + else if (Array.isArray(value)) { + serialized[key] = value + .slice(0) + .reduce((encoded, value) => encoded.concat([`${(0, util_uri_escape_1.escapeUri)(key)}=${(0, util_uri_escape_1.escapeUri)(value)}`]), []) + .sort() + .join("&"); + } + } + return keys + .map((key) => serialized[key]) + .filter((serialized) => serialized) + .join("&"); +}; +exports.getCanonicalQuery = getCanonicalQuery; - /** - * @api private - */ - bindServiceObject: function bindServiceObject(options) { - var self = this; - options = options || {}; - if (!self.service) { - self.service = new AWS.DynamoDB(options); - } else { - var config = AWS.util.copy(self.service.config); - self.service = new self.service.constructor.__super__(config); - self.service.config.params = - AWS.util.merge(self.service.config.params || {}, options.params); - } - }, +/***/ }), - /** - * @api private - */ - makeServiceRequest: function(operation, params, callback) { - var self = this; - var request = self.service[operation](params); - self.setupRequest(request); - self.setupResponse(request); - if (typeof callback === 'function') { - request.send(callback); - } - return request; - }, +/***/ 48545: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - /** - * @api private - */ - serviceClientOperationsMap: { - batchGet: 'batchGetItem', - batchWrite: 'batchWriteItem', - delete: 'deleteItem', - get: 'getItem', - put: 'putItem', - query: 'query', - scan: 'scan', - update: 'updateItem', - transactGet: 'transactGetItems', - transactWrite: 'transactWriteItems' - }, +"use strict"; - /** - * Returns the attributes of one or more items from one or more tables - * by delegating to `AWS.DynamoDB.batchGetItem()`. - * - * Supply the same parameters as {AWS.DynamoDB.batchGetItem} with - * `AttributeValue`s substituted by native JavaScript types. 
- * - * @see AWS.DynamoDB.batchGetItem - * @example Get items from multiple tables - * var params = { - * RequestItems: { - * 'Table-1': { - * Keys: [ - * { - * HashKey: 'haskey', - * NumberRangeKey: 1 - * } - * ] - * }, - * 'Table-2': { - * Keys: [ - * { foo: 'bar' }, - * ] - * } - * } - * }; - * - * var documentClient = new AWS.DynamoDB.DocumentClient(); - * - * documentClient.batchGet(params, function(err, data) { - * if (err) console.log(err); - * else console.log(data); - * }); - * - */ - batchGet: function(params, callback) { - var operation = this.serviceClientOperationsMap['batchGet']; - return this.makeServiceRequest(operation, params, callback); - }, +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getPayloadHash = void 0; +const is_array_buffer_1 = __nccwpck_require__(10780); +const util_hex_encoding_1 = __nccwpck_require__(45364); +const util_utf8_1 = __nccwpck_require__(41895); +const constants_1 = __nccwpck_require__(48644); +const getPayloadHash = async ({ headers, body }, hashConstructor) => { + for (const headerName of Object.keys(headers)) { + if (headerName.toLowerCase() === constants_1.SHA256_HEADER) { + return headers[headerName]; + } + } + if (body == undefined) { + return "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"; + } + else if (typeof body === "string" || ArrayBuffer.isView(body) || (0, is_array_buffer_1.isArrayBuffer)(body)) { + const hashCtor = new hashConstructor(); + hashCtor.update((0, util_utf8_1.toUint8Array)(body)); + return (0, util_hex_encoding_1.toHex)(await hashCtor.digest()); + } + return constants_1.UNSIGNED_PAYLOAD; +}; +exports.getPayloadHash = getPayloadHash; - /** - * Puts or deletes multiple items in one or more tables by delegating - * to `AWS.DynamoDB.batchWriteItem()`. - * - * Supply the same parameters as {AWS.DynamoDB.batchWriteItem} with - * `AttributeValue`s substituted by native JavaScript types. - * - * @see AWS.DynamoDB.batchWriteItem - * @example Write to and delete from a table - * var params = { - * RequestItems: { - * 'Table-1': [ - * { - * DeleteRequest: { - * Key: { HashKey: 'someKey' } - * } - * }, - * { - * PutRequest: { - * Item: { - * HashKey: 'anotherKey', - * NumAttribute: 1, - * BoolAttribute: true, - * ListAttribute: [1, 'two', false], - * MapAttribute: { foo: 'bar' } - * } - * } - * } - * ] - * } - * }; - * - * var documentClient = new AWS.DynamoDB.DocumentClient(); - * - * documentClient.batchWrite(params, function(err, data) { - * if (err) console.log(err); - * else console.log(data); - * }); - * - */ - batchWrite: function(params, callback) { - var operation = this.serviceClientOperationsMap['batchWrite']; - return this.makeServiceRequest(operation, params, callback); - }, - /** - * Deletes a single item in a table by primary key by delegating to - * `AWS.DynamoDB.deleteItem()` - * - * Supply the same parameters as {AWS.DynamoDB.deleteItem} with - * `AttributeValue`s substituted by native JavaScript types. 
- * - * @see AWS.DynamoDB.deleteItem - * @example Delete an item from a table - * var params = { - * TableName : 'Table', - * Key: { - * HashKey: 'hashkey', - * NumberRangeKey: 1 - * } - * }; - * - * var documentClient = new AWS.DynamoDB.DocumentClient(); - * - * documentClient.delete(params, function(err, data) { - * if (err) console.log(err); - * else console.log(data); - * }); - * - */ - delete: function(params, callback) { - var operation = this.serviceClientOperationsMap['delete']; - return this.makeServiceRequest(operation, params, callback); - }, +/***/ }), - /** - * Returns a set of attributes for the item with the given primary key - * by delegating to `AWS.DynamoDB.getItem()`. - * - * Supply the same parameters as {AWS.DynamoDB.getItem} with - * `AttributeValue`s substituted by native JavaScript types. - * - * @see AWS.DynamoDB.getItem - * @example Get an item from a table - * var params = { - * TableName : 'Table', - * Key: { - * HashKey: 'hashkey' - * } - * }; - * - * var documentClient = new AWS.DynamoDB.DocumentClient(); - * - * documentClient.get(params, function(err, data) { - * if (err) console.log(err); - * else console.log(data); - * }); - * - */ - get: function(params, callback) { - var operation = this.serviceClientOperationsMap['get']; - return this.makeServiceRequest(operation, params, callback); - }, +/***/ 62179: +/***/ ((__unused_webpack_module, exports) => { - /** - * Creates a new item, or replaces an old item with a new item by - * delegating to `AWS.DynamoDB.putItem()`. - * - * Supply the same parameters as {AWS.DynamoDB.putItem} with - * `AttributeValue`s substituted by native JavaScript types. - * - * @see AWS.DynamoDB.putItem - * @example Create a new item in a table - * var params = { - * TableName : 'Table', - * Item: { - * HashKey: 'haskey', - * NumAttribute: 1, - * BoolAttribute: true, - * ListAttribute: [1, 'two', false], - * MapAttribute: { foo: 'bar'}, - * NullAttribute: null - * } - * }; - * - * var documentClient = new AWS.DynamoDB.DocumentClient(); - * - * documentClient.put(params, function(err, data) { - * if (err) console.log(err); - * else console.log(data); - * }); - * - */ - put: function(params, callback) { - var operation = this.serviceClientOperationsMap['put']; - return this.makeServiceRequest(operation, params, callback); - }, +"use strict"; - /** - * Edits an existing item's attributes, or adds a new item to the table if - * it does not already exist by delegating to `AWS.DynamoDB.updateItem()`. - * - * Supply the same parameters as {AWS.DynamoDB.updateItem} with - * `AttributeValue`s substituted by native JavaScript types. 
- * - * @see AWS.DynamoDB.updateItem - * @example Update an item with expressions - * var params = { - * TableName: 'Table', - * Key: { HashKey : 'hashkey' }, - * UpdateExpression: 'set #a = :x + :y', - * ConditionExpression: '#a < :MAX', - * ExpressionAttributeNames: {'#a' : 'Sum'}, - * ExpressionAttributeValues: { - * ':x' : 20, - * ':y' : 45, - * ':MAX' : 100, - * } - * }; - * - * var documentClient = new AWS.DynamoDB.DocumentClient(); - * - * documentClient.update(params, function(err, data) { - * if (err) console.log(err); - * else console.log(data); - * }); - * - */ - update: function(params, callback) { - var operation = this.serviceClientOperationsMap['update']; - return this.makeServiceRequest(operation, params, callback); - }, +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.deleteHeader = exports.getHeaderValue = exports.hasHeader = void 0; +const hasHeader = (soughtHeader, headers) => { + soughtHeader = soughtHeader.toLowerCase(); + for (const headerName of Object.keys(headers)) { + if (soughtHeader === headerName.toLowerCase()) { + return true; + } + } + return false; +}; +exports.hasHeader = hasHeader; +const getHeaderValue = (soughtHeader, headers) => { + soughtHeader = soughtHeader.toLowerCase(); + for (const headerName of Object.keys(headers)) { + if (soughtHeader === headerName.toLowerCase()) { + return headers[headerName]; + } + } + return undefined; +}; +exports.getHeaderValue = getHeaderValue; +const deleteHeader = (soughtHeader, headers) => { + soughtHeader = soughtHeader.toLowerCase(); + for (const headerName of Object.keys(headers)) { + if (soughtHeader === headerName.toLowerCase()) { + delete headers[headerName]; + } + } +}; +exports.deleteHeader = deleteHeader; - /** - * Returns one or more items and item attributes by accessing every item - * in a table or a secondary index. - * - * Supply the same parameters as {AWS.DynamoDB.scan} with - * `AttributeValue`s substituted by native JavaScript types. - * - * @see AWS.DynamoDB.scan - * @example Scan the table with a filter expression - * var params = { - * TableName : 'Table', - * FilterExpression : 'Year = :this_year', - * ExpressionAttributeValues : {':this_year' : 2015} - * }; - * - * var documentClient = new AWS.DynamoDB.DocumentClient(); - * - * documentClient.scan(params, function(err, data) { - * if (err) console.log(err); - * else console.log(data); - * }); - * - */ - scan: function(params, callback) { - var operation = this.serviceClientOperationsMap['scan']; - return this.makeServiceRequest(operation, params, callback); - }, - /** - * Directly access items from a table by primary key or a secondary index. - * - * Supply the same parameters as {AWS.DynamoDB.query} with - * `AttributeValue`s substituted by native JavaScript types. - * - * @see AWS.DynamoDB.query - * @example Query an index - * var params = { - * TableName: 'Table', - * IndexName: 'Index', - * KeyConditionExpression: 'HashKey = :hkey and RangeKey > :rkey', - * ExpressionAttributeValues: { - * ':hkey': 'key', - * ':rkey': 2015 - * } - * }; - * - * var documentClient = new AWS.DynamoDB.DocumentClient(); - * - * documentClient.query(params, function(err, data) { - * if (err) console.log(err); - * else console.log(data); - * }); - * - */ - query: function(params, callback) { - var operation = this.serviceClientOperationsMap['query']; - return this.makeServiceRequest(operation, params, callback); - }, +/***/ }), - /** - * Synchronous write operation that groups up to 25 action requests. 
- * - * Supply the same parameters as {AWS.DynamoDB.transactWriteItems} with - * `AttributeValue`s substituted by native JavaScript types. - * - * @see AWS.DynamoDB.transactWriteItems - * @example Get items from multiple tables - * var params = { - * TransactItems: [{ - * Put: { - * TableName : 'Table0', - * Item: { - * HashKey: 'haskey', - * NumAttribute: 1, - * BoolAttribute: true, - * ListAttribute: [1, 'two', false], - * MapAttribute: { foo: 'bar'}, - * NullAttribute: null - * } - * } - * }, { - * Update: { - * TableName: 'Table1', - * Key: { HashKey : 'hashkey' }, - * UpdateExpression: 'set #a = :x + :y', - * ConditionExpression: '#a < :MAX', - * ExpressionAttributeNames: {'#a' : 'Sum'}, - * ExpressionAttributeValues: { - * ':x' : 20, - * ':y' : 45, - * ':MAX' : 100, - * } - * } - * }] - * }; - * - * documentClient.transactWrite(params, function(err, data) { - * if (err) console.log(err); - * else console.log(data); - * }); - */ - transactWrite: function(params, callback) { - var operation = this.serviceClientOperationsMap['transactWrite']; - return this.makeServiceRequest(operation, params, callback); - }, +/***/ 11528: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - /** - * Atomically retrieves multiple items from one or more tables (but not from indexes) - * in a single account and region. - * - * Supply the same parameters as {AWS.DynamoDB.transactGetItems} with - * `AttributeValue`s substituted by native JavaScript types. - * - * @see AWS.DynamoDB.transactGetItems - * @example Get items from multiple tables - * var params = { - * TransactItems: [{ - * Get: { - * TableName : 'Table0', - * Key: { - * HashKey: 'hashkey0' - * } - * } - * }, { - * Get: { - * TableName : 'Table1', - * Key: { - * HashKey: 'hashkey1' - * } - * } - * }] - * }; - * - * documentClient.transactGet(params, function(err, data) { - * if (err) console.log(err); - * else console.log(data); - * }); - */ - transactGet: function(params, callback) { - var operation = this.serviceClientOperationsMap['transactGet']; - return this.makeServiceRequest(operation, params, callback); - }, +"use strict"; - /** - * Creates a set of elements inferring the type of set from - * the type of the first element. Amazon DynamoDB currently supports - * the number sets, string sets, and binary sets. For more information - * about DynamoDB data types see the documentation on the - * [Amazon DynamoDB Data Model](http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/DataModel.html#DataModel.DataTypes). - * - * @param list [Array] Collection to represent your DynamoDB Set - * @param options [map] - * * **validate** [Boolean] set to true if you want to validate the type - * of each element in the set. Defaults to `false`. 
- * @example Creating a number set - * var documentClient = new AWS.DynamoDB.DocumentClient(); - * - * var params = { - * Item: { - * hashkey: 'hashkey' - * numbers: documentClient.createSet([1, 2, 3]); - * } - * }; - * - * documentClient.put(params, function(err, data) { - * if (err) console.log(err); - * else console.log(data); - * }); - * - */ - createSet: function(list, options) { - options = options || {}; - return new DynamoDBSet(list, options); - }, +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.prepareRequest = exports.moveHeadersToQuery = exports.getPayloadHash = exports.getCanonicalQuery = exports.getCanonicalHeaders = void 0; +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(39733), exports); +var getCanonicalHeaders_1 = __nccwpck_require__(51393); +Object.defineProperty(exports, "getCanonicalHeaders", ({ enumerable: true, get: function () { return getCanonicalHeaders_1.getCanonicalHeaders; } })); +var getCanonicalQuery_1 = __nccwpck_require__(33243); +Object.defineProperty(exports, "getCanonicalQuery", ({ enumerable: true, get: function () { return getCanonicalQuery_1.getCanonicalQuery; } })); +var getPayloadHash_1 = __nccwpck_require__(48545); +Object.defineProperty(exports, "getPayloadHash", ({ enumerable: true, get: function () { return getPayloadHash_1.getPayloadHash; } })); +var moveHeadersToQuery_1 = __nccwpck_require__(49828); +Object.defineProperty(exports, "moveHeadersToQuery", ({ enumerable: true, get: function () { return moveHeadersToQuery_1.moveHeadersToQuery; } })); +var prepareRequest_1 = __nccwpck_require__(60075); +Object.defineProperty(exports, "prepareRequest", ({ enumerable: true, get: function () { return prepareRequest_1.prepareRequest; } })); +tslib_1.__exportStar(__nccwpck_require__(19623), exports); - /** - * @api private - */ - getTranslator: function() { - return new Translator(this.options); - }, - /** - * @api private - */ - setupRequest: function setupRequest(request) { - var self = this; - var translator = self.getTranslator(); - var operation = request.operation; - var inputShape = request.service.api.operations[operation].input; - request._events.validate.unshift(function(req) { - req.rawParams = AWS.util.copy(req.params); - req.params = translator.translateInput(req.rawParams, inputShape); - }); - }, +/***/ }), - /** - * @api private - */ - setupResponse: function setupResponse(request) { - var self = this; - var translator = self.getTranslator(); - var outputShape = self.service.api.operations[request.operation].output; - request.on('extractData', function(response) { - response.data = translator.translateOutput(response.data, outputShape); - }); +/***/ 49828: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - var response = request.response; - response.nextPage = function(cb) { - var resp = this; - var req = resp.request; - var config; - var service = req.service; - var operation = req.operation; - try { - config = service.paginationConfig(operation, true); - } catch (e) { resp.error = e; } - - if (!resp.hasNextPage()) { - if (cb) cb(resp.error, null); - else if (resp.error) throw resp.error; - return null; - } +"use strict"; - var params = AWS.util.copy(req.rawParams); - if (!resp.nextPageTokens) { - return cb ? 
cb(null, null) : null; - } else { - var inputTokens = config.inputToken; - if (typeof inputTokens === 'string') inputTokens = [inputTokens]; - for (var i = 0; i < inputTokens.length; i++) { - params[inputTokens[i]] = resp.nextPageTokens[i]; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.moveHeadersToQuery = void 0; +const cloneRequest_1 = __nccwpck_require__(69098); +const moveHeadersToQuery = (request, options = {}) => { + var _a; + const { headers, query = {} } = typeof request.clone === "function" ? request.clone() : (0, cloneRequest_1.cloneRequest)(request); + for (const name of Object.keys(headers)) { + const lname = name.toLowerCase(); + if (lname.slice(0, 6) === "x-amz-" && !((_a = options.unhoistableHeaders) === null || _a === void 0 ? void 0 : _a.has(lname))) { + query[name] = headers[name]; + delete headers[name]; } - return self[operation](params, cb); - } + } + return { + ...request, + headers, + query, }; - } - -}); - -/** - * @api private - */ -module.exports = AWS.DynamoDB.DocumentClient; +}; +exports.moveHeadersToQuery = moveHeadersToQuery; /***/ }), -/***/ 91593: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 60075: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -var util = (__nccwpck_require__(28437).util); +"use strict"; -/** - * An object recognizable as a numeric value that stores the underlying number - * as a string. - * - * Intended to be a deserialization target for the DynamoDB Document Client when - * the `wrapNumbers` flag is set. This allows for numeric values that lose - * precision when converted to JavaScript's `number` type. - */ -var DynamoDBNumberValue = util.inherit({ - constructor: function NumberValue(value) { - this.wrapperName = 'NumberValue'; - this.value = value.toString(); - }, +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.prepareRequest = void 0; +const cloneRequest_1 = __nccwpck_require__(69098); +const constants_1 = __nccwpck_require__(48644); +const prepareRequest = (request) => { + request = typeof request.clone === "function" ? request.clone() : (0, cloneRequest_1.cloneRequest)(request); + for (const headerName of Object.keys(request.headers)) { + if (constants_1.GENERATED_HEADERS.indexOf(headerName.toLowerCase()) > -1) { + delete request.headers[headerName]; + } + } + return request; +}; +exports.prepareRequest = prepareRequest; - /** - * Render the underlying value as a number when converting to JSON. - */ - toJSON: function () { - return this.toNumber(); - }, - /** - * Convert the underlying value to a JavaScript number. - */ - toNumber: function () { - return Number(this.value); - }, +/***/ }), - /** - * Return a string representing the unaltered value provided to the - * constructor. 
- */ - toString: function () { - return this.value; - } -}); +/***/ 39299: +/***/ ((__unused_webpack_module, exports) => { -/** - * @api private - */ -module.exports = DynamoDBNumberValue; +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.toDate = exports.iso8601 = void 0; +const iso8601 = (time) => (0, exports.toDate)(time) + .toISOString() + .replace(/\.\d{3}Z$/, "Z"); +exports.iso8601 = iso8601; +const toDate = (time) => { + if (typeof time === "number") { + return new Date(time * 1000); + } + if (typeof time === "string") { + if (Number(time)) { + return new Date(Number(time) * 1000); + } + return new Date(time); + } + return time; +}; +exports.toDate = toDate; /***/ }), -/***/ 20304: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 70438: +/***/ ((__unused_webpack_module, exports) => { -var util = (__nccwpck_require__(28437).util); -var typeOf = (__nccwpck_require__(48084).typeOf); +"use strict"; -/** - * @api private - */ -var memberTypeToSetType = { - 'String': 'String', - 'Number': 'Number', - 'NumberValue': 'Number', - 'Binary': 'Binary' -}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.NoOpLogger = void 0; +class NoOpLogger { + trace() { } + debug() { } + info() { } + warn() { } + error() { } +} +exports.NoOpLogger = NoOpLogger; -/** - * @api private - */ -var DynamoDBSet = util.inherit({ - constructor: function Set(list, options) { - options = options || {}; - this.wrapperName = 'Set'; - this.initialize(list, options.validate); - }, +/***/ }), - initialize: function(list, validate) { - var self = this; - self.values = [].concat(list); - self.detectType(); - if (validate) { - self.validate(); - } - }, +/***/ 61600: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - detectType: function() { - this.type = memberTypeToSetType[typeOf(this.values[0])]; - if (!this.type) { - throw util.error(new Error(), { - code: 'InvalidSetType', - message: 'Sets can contain string, number, or binary values' - }); - } - }, +"use strict"; - validate: function() { - var self = this; - var length = self.values.length; - var values = self.values; - for (var i = 0; i < length; i++) { - if (memberTypeToSetType[typeOf(values[i])] !== self.type) { - throw util.error(new Error(), { - code: 'InvalidType', - message: self.type + ' Set contains ' + typeOf(values[i]) + ' value' - }); - } +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Client = void 0; +const middleware_stack_1 = __nccwpck_require__(97911); +class Client { + constructor(config) { + this.middlewareStack = (0, middleware_stack_1.constructStack)(); + this.config = config; + } + send(command, optionsOrCb, cb) { + const options = typeof optionsOrCb !== "function" ? optionsOrCb : undefined; + const callback = typeof optionsOrCb === "function" ? optionsOrCb : cb; + const handler = command.resolveMiddleware(this.middlewareStack, this.config, options); + if (callback) { + handler(command) + .then((result) => callback(null, result.output), (err) => callback(err)) + .catch(() => { }); + } + else { + return handler(command).then((result) => result.output); + } } - }, - - /** - * Render the underlying values only when converting to JSON. 
- */ - toJSON: function() { - var self = this; - return self.values; - } - -}); - -/** - * @api private - */ -module.exports = DynamoDBSet; + destroy() { + if (this.config.requestHandler.destroy) + this.config.requestHandler.destroy(); + } +} +exports.Client = Client; /***/ }), -/***/ 34222: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var util = (__nccwpck_require__(28437).util); -var convert = __nccwpck_require__(76663); +/***/ 32813: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -var Translator = function(options) { - options = options || {}; - this.attrValue = options.attrValue; - this.convertEmptyValues = Boolean(options.convertEmptyValues); - this.wrapNumbers = Boolean(options.wrapNumbers); -}; +"use strict"; -Translator.prototype.translateInput = function(value, shape) { - this.mode = 'input'; - return this.translate(value, shape); +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.collectBody = void 0; +const util_stream_1 = __nccwpck_require__(96607); +const collectBody = async (streamBody = new Uint8Array(), context) => { + if (streamBody instanceof Uint8Array) { + return util_stream_1.Uint8ArrayBlobAdapter.mutate(streamBody); + } + if (!streamBody) { + return util_stream_1.Uint8ArrayBlobAdapter.mutate(new Uint8Array()); + } + const fromContext = context.streamCollector(streamBody); + return util_stream_1.Uint8ArrayBlobAdapter.mutate(await fromContext); }; +exports.collectBody = collectBody; -Translator.prototype.translateOutput = function(value, shape) { - this.mode = 'output'; - return this.translate(value, shape); -}; -Translator.prototype.translate = function(value, shape) { - var self = this; - if (!shape || value === undefined) return undefined; +/***/ }), - if (shape.shape === self.attrValue) { - return convert[self.mode](value, { - convertEmptyValues: self.convertEmptyValues, - wrapNumbers: self.wrapNumbers, - }); - } - switch (shape.type) { - case 'structure': return self.translateStructure(value, shape); - case 'map': return self.translateMap(value, shape); - case 'list': return self.translateList(value, shape); - default: return self.translateScalar(value, shape); - } -}; +/***/ 75414: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -Translator.prototype.translateStructure = function(structure, shape) { - var self = this; - if (structure == null) return undefined; +"use strict"; - var struct = {}; - util.each(structure, function(name, value) { - var memberShape = shape.members[name]; - if (memberShape) { - var result = self.translate(value, memberShape); - if (result !== undefined) struct[name] = result; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Command = void 0; +const middleware_stack_1 = __nccwpck_require__(97911); +class Command { + constructor() { + this.middlewareStack = (0, middleware_stack_1.constructStack)(); } - }); - return struct; -}; - -Translator.prototype.translateList = function(list, shape) { - var self = this; - if (list == null) return undefined; +} +exports.Command = Command; - var out = []; - util.arrayEach(list, function(value) { - var result = self.translate(value, shape.member); - if (result === undefined) out.push(null); - else out.push(result); - }); - return out; -}; -Translator.prototype.translateMap = function(map, shape) { - var self = this; - if (map == null) return undefined; +/***/ }), - var out = {}; - util.each(map, function(key, value) { - var result = self.translate(value, shape.value); - if (result === 
undefined) out[key] = null; - else out[key] = result; - }); - return out; -}; +/***/ 92541: +/***/ ((__unused_webpack_module, exports) => { -Translator.prototype.translateScalar = function(value, shape) { - return shape.toType(value); -}; +"use strict"; -/** - * @api private - */ -module.exports = Translator; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.SENSITIVE_STRING = void 0; +exports.SENSITIVE_STRING = "***SensitiveInformation***"; /***/ }), -/***/ 48084: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var util = (__nccwpck_require__(28437).util); - -function typeOf(data) { - if (data === null && typeof data === 'object') { - return 'null'; - } else if (data !== undefined && isBinary(data)) { - return 'Binary'; - } else if (data !== undefined && data.constructor) { - return data.wrapperName || util.typeName(data.constructor); - } else if (data !== undefined && typeof data === 'object') { - // this object is the result of Object.create(null), hence the absence of a - // defined constructor - return 'Object'; - } else { - return 'undefined'; - } -} +/***/ 56929: +/***/ ((__unused_webpack_module, exports) => { -function isBinary(data) { - var types = [ - 'Buffer', 'File', 'Blob', 'ArrayBuffer', 'DataView', - 'Int8Array', 'Uint8Array', 'Uint8ClampedArray', - 'Int16Array', 'Uint16Array', 'Int32Array', 'Uint32Array', - 'Float32Array', 'Float64Array' - ]; - if (util.isNode()) { - var Stream = util.stream.Stream; - if (util.Buffer.isBuffer(data) || data instanceof Stream) { - return true; - } - } +"use strict"; - for (var i = 0; i < types.length; i++) { - if (data !== undefined && data.constructor) { - if (util.isType(data, types[i])) return true; - if (util.typeName(data.constructor) === types[i]) return true; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createAggregatedClient = void 0; +const createAggregatedClient = (commands, Client) => { + for (const command of Object.keys(commands)) { + const CommandCtor = commands[command]; + const methodImpl = async function (args, optionsOrCb, cb) { + const command = new CommandCtor(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } + else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") + throw new Error(`Expected http options but got ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } + else { + return this.send(command, optionsOrCb); + } + }; + const methodName = (command[0].toLowerCase() + command.slice(1)).replace(/Command$/, ""); + Client.prototype[methodName] = methodImpl; } - } - - return false; -} - -/** - * @api private - */ -module.exports = { - typeOf: typeOf, - isBinary: isBinary }; +exports.createAggregatedClient = createAggregatedClient; /***/ }), -/***/ 63727: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 21737: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; -var eventMessageChunker = (__nccwpck_require__(73630).eventMessageChunker); -var parseEvent = (__nccwpck_require__(52123).parseEvent); +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.parseEpochTimestamp = exports.parseRfc7231DateTime = exports.parseRfc3339DateTimeWithOffset = exports.parseRfc3339DateTime = exports.dateToUtcString = void 0; +const parse_utils_1 = __nccwpck_require__(74857); +const DAYS = ["Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"]; +const MONTHS = ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", 
"Aug", "Sep", "Oct", "Nov", "Dec"]; +function dateToUtcString(date) { + const year = date.getUTCFullYear(); + const month = date.getUTCMonth(); + const dayOfWeek = date.getUTCDay(); + const dayOfMonthInt = date.getUTCDate(); + const hoursInt = date.getUTCHours(); + const minutesInt = date.getUTCMinutes(); + const secondsInt = date.getUTCSeconds(); + const dayOfMonthString = dayOfMonthInt < 10 ? `0${dayOfMonthInt}` : `${dayOfMonthInt}`; + const hoursString = hoursInt < 10 ? `0${hoursInt}` : `${hoursInt}`; + const minutesString = minutesInt < 10 ? `0${minutesInt}` : `${minutesInt}`; + const secondsString = secondsInt < 10 ? `0${secondsInt}` : `${secondsInt}`; + return `${DAYS[dayOfWeek]}, ${dayOfMonthString} ${MONTHS[month]} ${year} ${hoursString}:${minutesString}:${secondsString} GMT`; +} +exports.dateToUtcString = dateToUtcString; +const RFC3339 = new RegExp(/^(\d{4})-(\d{2})-(\d{2})[tT](\d{2}):(\d{2}):(\d{2})(?:\.(\d+))?[zZ]$/); +const parseRfc3339DateTime = (value) => { + if (value === null || value === undefined) { + return undefined; + } + if (typeof value !== "string") { + throw new TypeError("RFC-3339 date-times must be expressed as strings"); + } + const match = RFC3339.exec(value); + if (!match) { + throw new TypeError("Invalid RFC-3339 date-time value"); + } + const [_, yearStr, monthStr, dayStr, hours, minutes, seconds, fractionalMilliseconds] = match; + const year = (0, parse_utils_1.strictParseShort)(stripLeadingZeroes(yearStr)); + const month = parseDateValue(monthStr, "month", 1, 12); + const day = parseDateValue(dayStr, "day", 1, 31); + return buildDate(year, month, day, { hours, minutes, seconds, fractionalMilliseconds }); +}; +exports.parseRfc3339DateTime = parseRfc3339DateTime; +const RFC3339_WITH_OFFSET = new RegExp(/^(\d{4})-(\d{2})-(\d{2})[tT](\d{2}):(\d{2}):(\d{2})(?:\.(\d+))?(([-+]\d{2}\:\d{2})|[zZ])$/); +const parseRfc3339DateTimeWithOffset = (value) => { + if (value === null || value === undefined) { + return undefined; + } + if (typeof value !== "string") { + throw new TypeError("RFC-3339 date-times must be expressed as strings"); + } + const match = RFC3339_WITH_OFFSET.exec(value); + if (!match) { + throw new TypeError("Invalid RFC-3339 date-time value"); + } + const [_, yearStr, monthStr, dayStr, hours, minutes, seconds, fractionalMilliseconds, offsetStr] = match; + const year = (0, parse_utils_1.strictParseShort)(stripLeadingZeroes(yearStr)); + const month = parseDateValue(monthStr, "month", 1, 12); + const day = parseDateValue(dayStr, "day", 1, 31); + const date = buildDate(year, month, day, { hours, minutes, seconds, fractionalMilliseconds }); + if (offsetStr.toUpperCase() != "Z") { + date.setTime(date.getTime() - parseOffsetToMilliseconds(offsetStr)); + } + return date; +}; +exports.parseRfc3339DateTimeWithOffset = parseRfc3339DateTimeWithOffset; +const IMF_FIXDATE = new RegExp(/^(?:Mon|Tue|Wed|Thu|Fri|Sat|Sun), (\d{2}) (Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec) (\d{4}) (\d{1,2}):(\d{2}):(\d{2})(?:\.(\d+))? GMT$/); +const RFC_850_DATE = new RegExp(/^(?:Monday|Tuesday|Wednesday|Thursday|Friday|Saturday|Sunday), (\d{2})-(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)-(\d{2}) (\d{1,2}):(\d{2}):(\d{2})(?:\.(\d+))? GMT$/); +const ASC_TIME = new RegExp(/^(?:Mon|Tue|Wed|Thu|Fri|Sat|Sun) (Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec) ( [1-9]|\d{2}) (\d{1,2}):(\d{2}):(\d{2})(?:\.(\d+))? 
(\d{4})$/); +const parseRfc7231DateTime = (value) => { + if (value === null || value === undefined) { + return undefined; + } + if (typeof value !== "string") { + throw new TypeError("RFC-7231 date-times must be expressed as strings"); + } + let match = IMF_FIXDATE.exec(value); + if (match) { + const [_, dayStr, monthStr, yearStr, hours, minutes, seconds, fractionalMilliseconds] = match; + return buildDate((0, parse_utils_1.strictParseShort)(stripLeadingZeroes(yearStr)), parseMonthByShortName(monthStr), parseDateValue(dayStr, "day", 1, 31), { hours, minutes, seconds, fractionalMilliseconds }); + } + match = RFC_850_DATE.exec(value); + if (match) { + const [_, dayStr, monthStr, yearStr, hours, minutes, seconds, fractionalMilliseconds] = match; + return adjustRfc850Year(buildDate(parseTwoDigitYear(yearStr), parseMonthByShortName(monthStr), parseDateValue(dayStr, "day", 1, 31), { + hours, + minutes, + seconds, + fractionalMilliseconds, + })); + } + match = ASC_TIME.exec(value); + if (match) { + const [_, monthStr, dayStr, hours, minutes, seconds, fractionalMilliseconds, yearStr] = match; + return buildDate((0, parse_utils_1.strictParseShort)(stripLeadingZeroes(yearStr)), parseMonthByShortName(monthStr), parseDateValue(dayStr.trimLeft(), "day", 1, 31), { hours, minutes, seconds, fractionalMilliseconds }); + } + throw new TypeError("Invalid RFC-7231 date-time value"); +}; +exports.parseRfc7231DateTime = parseRfc7231DateTime; +const parseEpochTimestamp = (value) => { + if (value === null || value === undefined) { + return undefined; + } + let valueAsDouble; + if (typeof value === "number") { + valueAsDouble = value; + } + else if (typeof value === "string") { + valueAsDouble = (0, parse_utils_1.strictParseDouble)(value); + } + else { + throw new TypeError("Epoch timestamps must be expressed as floating point numbers or their string representation"); + } + if (Number.isNaN(valueAsDouble) || valueAsDouble === Infinity || valueAsDouble === -Infinity) { + throw new TypeError("Epoch timestamps must be valid, non-Infinite, non-NaN numerics"); + } + return new Date(Math.round(valueAsDouble * 1000)); +}; +exports.parseEpochTimestamp = parseEpochTimestamp; +const buildDate = (year, month, day, time) => { + const adjustedMonth = month - 1; + validateDayOfMonth(year, adjustedMonth, day); + return new Date(Date.UTC(year, adjustedMonth, day, parseDateValue(time.hours, "hour", 0, 23), parseDateValue(time.minutes, "minute", 0, 59), parseDateValue(time.seconds, "seconds", 0, 60), parseMilliseconds(time.fractionalMilliseconds))); +}; +const parseTwoDigitYear = (value) => { + const thisYear = new Date().getUTCFullYear(); + const valueInThisCentury = Math.floor(thisYear / 100) * 100 + (0, parse_utils_1.strictParseShort)(stripLeadingZeroes(value)); + if (valueInThisCentury < thisYear) { + return valueInThisCentury + 100; + } + return valueInThisCentury; +}; +const FIFTY_YEARS_IN_MILLIS = 50 * 365 * 24 * 60 * 60 * 1000; +const adjustRfc850Year = (input) => { + if (input.getTime() - new Date().getTime() > FIFTY_YEARS_IN_MILLIS) { + return new Date(Date.UTC(input.getUTCFullYear() - 100, input.getUTCMonth(), input.getUTCDate(), input.getUTCHours(), input.getUTCMinutes(), input.getUTCSeconds(), input.getUTCMilliseconds())); + } + return input; +}; +const parseMonthByShortName = (value) => { + const monthIdx = MONTHS.indexOf(value); + if (monthIdx < 0) { + throw new TypeError(`Invalid month: ${value}`); + } + return monthIdx + 1; +}; +const DAYS_IN_MONTH = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]; +const 
validateDayOfMonth = (year, month, day) => { + let maxDays = DAYS_IN_MONTH[month]; + if (month === 1 && isLeapYear(year)) { + maxDays = 29; + } + if (day > maxDays) { + throw new TypeError(`Invalid day for ${MONTHS[month]} in ${year}: ${day}`); + } +}; +const isLeapYear = (year) => { + return year % 4 === 0 && (year % 100 !== 0 || year % 400 === 0); +}; +const parseDateValue = (value, type, lower, upper) => { + const dateVal = (0, parse_utils_1.strictParseByte)(stripLeadingZeroes(value)); + if (dateVal < lower || dateVal > upper) { + throw new TypeError(`${type} must be between ${lower} and ${upper}, inclusive`); + } + return dateVal; +}; +const parseMilliseconds = (value) => { + if (value === null || value === undefined) { + return 0; + } + return (0, parse_utils_1.strictParseFloat32)("0." + value) * 1000; +}; +const parseOffsetToMilliseconds = (value) => { + const directionStr = value[0]; + let direction = 1; + if (directionStr == "+") { + direction = 1; + } + else if (directionStr == "-") { + direction = -1; + } + else { + throw new TypeError(`Offset direction, ${directionStr}, must be "+" or "-"`); + } + const hour = Number(value.substring(1, 3)); + const minute = Number(value.substring(4, 6)); + return direction * (hour * 60 + minute) * 60 * 1000; +}; +const stripLeadingZeroes = (value) => { + let idx = 0; + while (idx < value.length - 1 && value.charAt(idx) === "0") { + idx++; + } + if (idx === 0) { + return value; + } + return value.slice(idx); +}; -function createEventStream(body, parser, model) { - var eventMessages = eventMessageChunker(body); - var events = []; +/***/ }), - for (var i = 0; i < eventMessages.length; i++) { - events.push(parseEvent(parser, eventMessages[i], model)); - } +/***/ 9681: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - return events; -} +"use strict"; -/** - * @api private - */ -module.exports = { - createEventStream: createEventStream +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.withBaseException = exports.throwDefaultError = void 0; +const exceptions_1 = __nccwpck_require__(88074); +const throwDefaultError = ({ output, parsedBody, exceptionCtor, errorCode }) => { + const $metadata = deserializeMetadata(output); + const statusCode = $metadata.httpStatusCode ? $metadata.httpStatusCode + "" : undefined; + const response = new exceptionCtor({ + name: (parsedBody === null || parsedBody === void 0 ? void 0 : parsedBody.code) || (parsedBody === null || parsedBody === void 0 ? void 0 : parsedBody.Code) || errorCode || statusCode || "UnknownError", + $fault: "client", + $metadata, + }); + throw (0, exceptions_1.decorateServiceException)(response, parsedBody); +}; +exports.throwDefaultError = throwDefaultError; +const withBaseException = (ExceptionCtor) => { + return ({ output, parsedBody, errorCode }) => { + (0, exports.throwDefaultError)({ output, parsedBody, exceptionCtor: ExceptionCtor, errorCode }); + }; +}; +exports.withBaseException = withBaseException; +const deserializeMetadata = (output) => { + var _a, _b; + return ({ + httpStatusCode: output.statusCode, + requestId: (_b = (_a = output.headers["x-amzn-requestid"]) !== null && _a !== void 0 ? _a : output.headers["x-amzn-request-id"]) !== null && _b !== void 0 ? 
_b : output.headers["x-amz-request-id"], + extendedRequestId: output.headers["x-amz-id-2"], + cfId: output.headers["x-amz-cf-id"], + }); }; /***/ }), -/***/ 18518: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var util = (__nccwpck_require__(28437).util); -var Transform = (__nccwpck_require__(12781).Transform); -var allocBuffer = util.buffer.alloc; - -/** @type {Transform} */ -function EventMessageChunkerStream(options) { - Transform.call(this, options); +/***/ 11163: +/***/ ((__unused_webpack_module, exports) => { - this.currentMessageTotalLength = 0; - this.currentMessagePendingLength = 0; - /** @type {Buffer} */ - this.currentMessage = null; +"use strict"; - /** @type {Buffer} */ - this.messageLengthBuffer = null; -} +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.loadConfigsForDefaultMode = void 0; +const loadConfigsForDefaultMode = (mode) => { + switch (mode) { + case "standard": + return { + retryMode: "standard", + connectionTimeout: 3100, + }; + case "in-region": + return { + retryMode: "standard", + connectionTimeout: 1100, + }; + case "cross-region": + return { + retryMode: "standard", + connectionTimeout: 3100, + }; + case "mobile": + return { + retryMode: "standard", + connectionTimeout: 30000, + }; + default: + return {}; + } +}; +exports.loadConfigsForDefaultMode = loadConfigsForDefaultMode; -EventMessageChunkerStream.prototype = Object.create(Transform.prototype); -/** - * - * @param {Buffer} chunk - * @param {string} encoding - * @param {*} callback - */ -EventMessageChunkerStream.prototype._transform = function(chunk, encoding, callback) { - var chunkLength = chunk.length; - var currentOffset = 0; - - while (currentOffset < chunkLength) { - // create new message if necessary - if (!this.currentMessage) { - // working on a new message, determine total length - var bytesRemaining = chunkLength - currentOffset; - // prevent edge case where total length spans 2 chunks - if (!this.messageLengthBuffer) { - this.messageLengthBuffer = allocBuffer(4); - } - var numBytesForTotal = Math.min( - 4 - this.currentMessagePendingLength, // remaining bytes to fill the messageLengthBuffer - bytesRemaining // bytes left in chunk - ); +/***/ }), - chunk.copy( - this.messageLengthBuffer, - this.currentMessagePendingLength, - currentOffset, - currentOffset + numBytesForTotal - ); +/***/ 91809: +/***/ ((__unused_webpack_module, exports) => { - this.currentMessagePendingLength += numBytesForTotal; - currentOffset += numBytesForTotal; +"use strict"; - if (this.currentMessagePendingLength < 4) { - // not enough information to create the current message - break; - } - this.allocateMessage(this.messageLengthBuffer.readUInt32BE(0)); - this.messageLengthBuffer = null; - } - - // write data into current message - var numBytesToWrite = Math.min( - this.currentMessageTotalLength - this.currentMessagePendingLength, // number of bytes left to complete message - chunkLength - currentOffset // number of bytes left in the original chunk - ); - chunk.copy( - this.currentMessage, // target buffer - this.currentMessagePendingLength, // target offset - currentOffset, // chunk offset - currentOffset + numBytesToWrite // chunk end to write - ); - this.currentMessagePendingLength += numBytesToWrite; - currentOffset += numBytesToWrite; - - // check if a message is ready to be pushed - if (this.currentMessageTotalLength && this.currentMessageTotalLength === this.currentMessagePendingLength) { - // push out the message - this.push(this.currentMessage); - // cleanup - 
this.currentMessage = null; - this.currentMessageTotalLength = 0; - this.currentMessagePendingLength = 0; - } - } - - callback(); -}; - -EventMessageChunkerStream.prototype._flush = function(callback) { - if (this.currentMessageTotalLength) { - if (this.currentMessageTotalLength === this.currentMessagePendingLength) { - callback(null, this.currentMessage); - } else { - callback(new Error('Truncated event message received.')); - } - } else { - callback(); +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.emitWarningIfUnsupportedVersion = void 0; +let warningEmitted = false; +const emitWarningIfUnsupportedVersion = (version) => { + if (version && !warningEmitted && parseInt(version.substring(1, version.indexOf("."))) < 14) { + warningEmitted = true; } }; +exports.emitWarningIfUnsupportedVersion = emitWarningIfUnsupportedVersion; -/** - * @param {number} size Size of the message to be allocated. - * @api private - */ -EventMessageChunkerStream.prototype.allocateMessage = function(size) { - if (typeof size !== 'number') { - throw new Error('Attempted to allocate an event message where size was not a number: ' + size); - } - this.currentMessageTotalLength = size; - this.currentMessagePendingLength = 4; - this.currentMessage = allocBuffer(size); - this.currentMessage.writeUInt32BE(size, 0); -}; -/** - * @api private - */ -module.exports = { - EventMessageChunkerStream: EventMessageChunkerStream -}; +/***/ }), +/***/ 88074: +/***/ ((__unused_webpack_module, exports) => { -/***/ }), +"use strict"; -/***/ 73630: -/***/ ((module) => { +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.decorateServiceException = exports.ServiceException = void 0; +class ServiceException extends Error { + constructor(options) { + super(options.message); + Object.setPrototypeOf(this, ServiceException.prototype); + this.name = options.name; + this.$fault = options.$fault; + this.$metadata = options.$metadata; + } +} +exports.ServiceException = ServiceException; +const decorateServiceException = (exception, additions = {}) => { + Object.entries(additions) + .filter(([, v]) => v !== undefined) + .forEach(([k, v]) => { + if (exception[k] == undefined || exception[k] === "") { + exception[k] = v; + } + }); + const message = exception.message || exception.Message || "UnknownError"; + exception.message = message; + delete exception.Message; + return exception; +}; +exports.decorateServiceException = decorateServiceException; -/** - * Takes in a buffer of event messages and splits them into individual messages. 
- * @param {Buffer} buffer - * @api private - */ -function eventMessageChunker(buffer) { - /** @type Buffer[] */ - var messages = []; - var offset = 0; - while (offset < buffer.length) { - var totalLength = buffer.readInt32BE(offset); +/***/ }), - // create new buffer for individual message (shares memory with original) - var message = buffer.slice(offset, totalLength + offset); - // increment offset to it starts at the next message - offset += totalLength; +/***/ 76016: +/***/ ((__unused_webpack_module, exports) => { - messages.push(message); - } +"use strict"; - return messages; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.extendedEncodeURIComponent = void 0; +function extendedEncodeURIComponent(str) { + return encodeURIComponent(str).replace(/[!'()*]/g, function (c) { + return "%" + c.charCodeAt(0).toString(16).toUpperCase(); + }); } - -/** - * @api private - */ -module.exports = { - eventMessageChunker: eventMessageChunker -}; +exports.extendedEncodeURIComponent = extendedEncodeURIComponent; /***/ }), -/***/ 93773: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 42638: +/***/ ((__unused_webpack_module, exports) => { -var Transform = (__nccwpck_require__(12781).Transform); -var parseEvent = (__nccwpck_require__(52123).parseEvent); +"use strict"; -/** @type {Transform} */ -function EventUnmarshallerStream(options) { - options = options || {}; - // set output to object mode - options.readableObjectMode = true; - Transform.call(this, options); - this._readableState.objectMode = true; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getArrayIfSingleItem = void 0; +const getArrayIfSingleItem = (mayBeArray) => Array.isArray(mayBeArray) ? mayBeArray : [mayBeArray]; +exports.getArrayIfSingleItem = getArrayIfSingleItem; - this.parser = options.parser; - this.eventStreamModel = options.eventStreamModel; -} -EventUnmarshallerStream.prototype = Object.create(Transform.prototype); +/***/ }), -/** - * - * @param {Buffer} chunk - * @param {string} encoding - * @param {*} callback - */ -EventUnmarshallerStream.prototype._transform = function(chunk, encoding, callback) { - try { - var event = parseEvent(this.parser, chunk, this.eventStreamModel); - this.push(event); - return callback(); - } catch (err) { - callback(err); - } -}; +/***/ 92188: +/***/ ((__unused_webpack_module, exports) => { -/** - * @api private - */ -module.exports = { - EventUnmarshallerStream: EventUnmarshallerStream +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getValueFromTextNode = void 0; +const getValueFromTextNode = (obj) => { + const textNodeName = "#text"; + for (const key in obj) { + if (obj.hasOwnProperty(key) && obj[key][textNodeName] !== undefined) { + obj[key] = obj[key][textNodeName]; + } + else if (typeof obj[key] === "object" && obj[key] !== null) { + obj[key] = (0, exports.getValueFromTextNode)(obj[key]); + } + } + return obj; }; +exports.getValueFromTextNode = getValueFromTextNode; /***/ }), -/***/ 48583: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 63570: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -var util = (__nccwpck_require__(28437).util); -var toBuffer = util.buffer.toBuffer; +"use strict"; -/** - * A lossless representation of a signed, 64-bit integer. 
Instances of this - * class may be used in arithmetic expressions as if they were numeric - * primitives, but the binary representation will be preserved unchanged as the - * `bytes` property of the object. The bytes should be encoded as big-endian, - * two's complement integers. - * @param {Buffer} bytes - * - * @api private - */ -function Int64(bytes) { - if (bytes.length !== 8) { - throw new Error('Int64 buffers must be exactly 8 bytes'); - } - if (!util.Buffer.isBuffer(bytes)) bytes = toBuffer(bytes); +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(70438), exports); +tslib_1.__exportStar(__nccwpck_require__(61600), exports); +tslib_1.__exportStar(__nccwpck_require__(32813), exports); +tslib_1.__exportStar(__nccwpck_require__(75414), exports); +tslib_1.__exportStar(__nccwpck_require__(92541), exports); +tslib_1.__exportStar(__nccwpck_require__(56929), exports); +tslib_1.__exportStar(__nccwpck_require__(21737), exports); +tslib_1.__exportStar(__nccwpck_require__(9681), exports); +tslib_1.__exportStar(__nccwpck_require__(11163), exports); +tslib_1.__exportStar(__nccwpck_require__(91809), exports); +tslib_1.__exportStar(__nccwpck_require__(88074), exports); +tslib_1.__exportStar(__nccwpck_require__(76016), exports); +tslib_1.__exportStar(__nccwpck_require__(42638), exports); +tslib_1.__exportStar(__nccwpck_require__(92188), exports); +tslib_1.__exportStar(__nccwpck_require__(32964), exports); +tslib_1.__exportStar(__nccwpck_require__(83495), exports); +tslib_1.__exportStar(__nccwpck_require__(74857), exports); +tslib_1.__exportStar(__nccwpck_require__(15342), exports); +tslib_1.__exportStar(__nccwpck_require__(59796), exports); +tslib_1.__exportStar(__nccwpck_require__(1752), exports); +tslib_1.__exportStar(__nccwpck_require__(92480), exports); + + +/***/ }), + +/***/ 32964: +/***/ ((__unused_webpack_module, exports) => { - this.bytes = bytes; -} +"use strict"; -/** - * @param {number} number - * @returns {Int64} - * - * @api private - */ -Int64.fromNumber = function(number) { - if (number > 9223372036854775807 || number < -9223372036854775808) { - throw new Error( - number + ' is too large (or, if negative, too small) to represent as an Int64' - ); +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.LazyJsonString = exports.StringWrapper = void 0; +const StringWrapper = function () { + const Class = Object.getPrototypeOf(this).constructor; + const Constructor = Function.bind.apply(String, [null, ...arguments]); + const instance = new Constructor(); + Object.setPrototypeOf(instance, Class.prototype); + return instance; +}; +exports.StringWrapper = StringWrapper; +exports.StringWrapper.prototype = Object.create(String.prototype, { + constructor: { + value: exports.StringWrapper, + enumerable: false, + writable: true, + configurable: true, + }, +}); +Object.setPrototypeOf(exports.StringWrapper, String); +class LazyJsonString extends exports.StringWrapper { + deserializeJSON() { + return JSON.parse(super.toString()); } - - var bytes = new Uint8Array(8); - for ( - var i = 7, remaining = Math.abs(Math.round(number)); - i > -1 && remaining > 0; - i--, remaining /= 256 - ) { - bytes[i] = remaining; + toJSON() { + return super.toString(); } - - if (number < 0) { - negate(bytes); + static fromObject(object) { + if (object instanceof LazyJsonString) { + return object; + } + else if (object instanceof String || typeof object === "string") { + return new 
LazyJsonString(object); + } + return new LazyJsonString(JSON.stringify(object)); } +} +exports.LazyJsonString = LazyJsonString; - return new Int64(bytes); -}; -/** - * @returns {number} - * - * @api private - */ -Int64.prototype.valueOf = function() { - var bytes = this.bytes.slice(0); - var negative = bytes[0] & 128; - if (negative) { - negate(bytes); - } +/***/ }), - return parseInt(bytes.toString('hex'), 16) * (negative ? -1 : 1); -}; +/***/ 83495: +/***/ ((__unused_webpack_module, exports) => { -Int64.prototype.toString = function() { - return String(this.valueOf()); -}; +"use strict"; -/** - * @param {Buffer} bytes - * - * @api private - */ -function negate(bytes) { - for (var i = 0; i < 8; i++) { - bytes[i] ^= 0xFF; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.take = exports.convertMap = exports.map = void 0; +function map(arg0, arg1, arg2) { + let target; + let filter; + let instructions; + if (typeof arg1 === "undefined" && typeof arg2 === "undefined") { + target = {}; + instructions = arg0; } - for (var i = 7; i > -1; i--) { - bytes[i]++; - if (bytes[i] !== 0) { - break; + else { + target = arg0; + if (typeof arg1 === "function") { + filter = arg1; + instructions = arg2; + return mapWithFilter(target, filter, instructions); + } + else { + instructions = arg1; + } + } + for (const key of Object.keys(instructions)) { + if (!Array.isArray(instructions[key])) { + target[key] = instructions[key]; + continue; } + applyInstruction(target, null, instructions, key); } + return target; } - -/** - * @api private - */ -module.exports = { - Int64: Int64 +exports.map = map; +const convertMap = (target) => { + const output = {}; + for (const [k, v] of Object.entries(target || {})) { + output[k] = [, v]; + } + return output; +}; +exports.convertMap = convertMap; +const take = (source, instructions) => { + const out = {}; + for (const key in instructions) { + applyInstruction(out, source, instructions, key); + } + return out; +}; +exports.take = take; +const mapWithFilter = (target, filter, instructions) => { + return map(target, Object.entries(instructions).reduce((_instructions, [key, value]) => { + if (Array.isArray(value)) { + _instructions[key] = value; + } + else { + if (typeof value === "function") { + _instructions[key] = [filter, value()]; + } + else { + _instructions[key] = [filter, value]; + } + } + return _instructions; + }, {})); +}; +const applyInstruction = (target, source, instructions, targetKey) => { + if (source !== null) { + let instruction = instructions[targetKey]; + if (typeof instruction === "function") { + instruction = [, instruction]; + } + const [filter = nonNullish, valueFn = pass, sourceKey = targetKey] = instruction; + if ((typeof filter === "function" && filter(source[sourceKey])) || (typeof filter !== "function" && !!filter)) { + target[targetKey] = valueFn(source[sourceKey]); + } + return; + } + let [filter, value] = instructions[targetKey]; + if (typeof value === "function") { + let _value; + const defaultFilterPassed = filter === undefined && (_value = value()) != null; + const customFilterPassed = (typeof filter === "function" && !!filter(void 0)) || (typeof filter !== "function" && !!filter); + if (defaultFilterPassed) { + target[targetKey] = _value; + } + else if (customFilterPassed) { + target[targetKey] = value(); + } + } + else { + const defaultFilterPassed = filter === undefined && value != null; + const customFilterPassed = (typeof filter === "function" && !!filter(value)) || (typeof filter !== "function" && !!filter); + if 
(defaultFilterPassed || customFilterPassed) { + target[targetKey] = value; + } + } }; +const nonNullish = (_) => _ != null; +const pass = (_) => _; /***/ }), -/***/ 52123: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 74857: +/***/ ((__unused_webpack_module, exports) => { -var parseMessage = (__nccwpck_require__(30866).parseMessage); +"use strict"; -/** - * - * @param {*} parser - * @param {Buffer} message - * @param {*} shape - * @api private - */ -function parseEvent(parser, message, shape) { - var parsedMessage = parseMessage(message); - - // check if message is an event or error - var messageType = parsedMessage.headers[':message-type']; - if (messageType) { - if (messageType.value === 'error') { - throw parseError(parsedMessage); - } else if (messageType.value !== 'event') { - // not sure how to parse non-events/non-errors, ignore for now - return; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.logger = exports.strictParseByte = exports.strictParseShort = exports.strictParseInt32 = exports.strictParseInt = exports.strictParseLong = exports.limitedParseFloat32 = exports.limitedParseFloat = exports.handleFloat = exports.limitedParseDouble = exports.strictParseFloat32 = exports.strictParseFloat = exports.strictParseDouble = exports.expectUnion = exports.expectString = exports.expectObject = exports.expectNonNull = exports.expectByte = exports.expectShort = exports.expectInt32 = exports.expectInt = exports.expectLong = exports.expectFloat32 = exports.expectNumber = exports.expectBoolean = exports.parseBoolean = void 0; +const parseBoolean = (value) => { + switch (value) { + case "true": + return true; + case "false": + return false; + default: + throw new Error(`Unable to parse boolean value "${value}"`); + } +}; +exports.parseBoolean = parseBoolean; +const expectBoolean = (value) => { + if (value === null || value === undefined) { + return undefined; + } + if (typeof value === "number") { + if (value === 0 || value === 1) { + exports.logger.warn(stackTraceWarning(`Expected boolean, got ${typeof value}: ${value}`)); + } + if (value === 0) { + return false; + } + if (value === 1) { + return true; } } - - // determine event type - var eventType = parsedMessage.headers[':event-type']; - // check that the event type is modeled - var eventModel = shape.members[eventType.value]; - if (!eventModel) { - return; + if (typeof value === "string") { + const lower = value.toLowerCase(); + if (lower === "false" || lower === "true") { + exports.logger.warn(stackTraceWarning(`Expected boolean, got ${typeof value}: ${value}`)); + } + if (lower === "false") { + return false; + } + if (lower === "true") { + return true; + } } - - var result = {}; - // check if an event payload exists - var eventPayloadMemberName = eventModel.eventPayloadMemberName; - if (eventPayloadMemberName) { - var payloadShape = eventModel.members[eventPayloadMemberName]; - // if the shape is binary, return the byte array - if (payloadShape.type === 'binary') { - result[eventPayloadMemberName] = parsedMessage.body; - } else { - result[eventPayloadMemberName] = parser.parse(parsedMessage.body.toString(), payloadShape); + if (typeof value === "boolean") { + return value; + } + throw new TypeError(`Expected boolean, got ${typeof value}: ${value}`); +}; +exports.expectBoolean = expectBoolean; +const expectNumber = (value) => { + if (value === null || value === undefined) { + return undefined; + } + if (typeof value === "string") { + const parsed = parseFloat(value); + if 
(!Number.isNaN(parsed)) { + if (String(parsed) !== String(value)) { + exports.logger.warn(stackTraceWarning(`Expected number but observed string: ${value}`)); + } + return parsed; } } - - // read event headers - var eventHeaderNames = eventModel.eventHeaderMemberNames; - for (var i = 0; i < eventHeaderNames.length; i++) { - var name = eventHeaderNames[i]; - if (parsedMessage.headers[name]) { - // parse the header! - result[name] = eventModel.members[name].toType(parsedMessage.headers[name].value); + if (typeof value === "number") { + return value; + } + throw new TypeError(`Expected number, got ${typeof value}: ${value}`); +}; +exports.expectNumber = expectNumber; +const MAX_FLOAT = Math.ceil(2 ** 127 * (2 - 2 ** -23)); +const expectFloat32 = (value) => { + const expected = (0, exports.expectNumber)(value); + if (expected !== undefined && !Number.isNaN(expected) && expected !== Infinity && expected !== -Infinity) { + if (Math.abs(expected) > MAX_FLOAT) { + throw new TypeError(`Expected 32-bit float, got ${value}`); + } + } + return expected; +}; +exports.expectFloat32 = expectFloat32; +const expectLong = (value) => { + if (value === null || value === undefined) { + return undefined; + } + if (Number.isInteger(value) && !Number.isNaN(value)) { + return value; + } + throw new TypeError(`Expected integer, got ${typeof value}: ${value}`); +}; +exports.expectLong = expectLong; +exports.expectInt = exports.expectLong; +const expectInt32 = (value) => expectSizedInt(value, 32); +exports.expectInt32 = expectInt32; +const expectShort = (value) => expectSizedInt(value, 16); +exports.expectShort = expectShort; +const expectByte = (value) => expectSizedInt(value, 8); +exports.expectByte = expectByte; +const expectSizedInt = (value, size) => { + const expected = (0, exports.expectLong)(value); + if (expected !== undefined && castInt(expected, size) !== expected) { + throw new TypeError(`Expected ${size}-bit integer, got ${value}`); + } + return expected; +}; +const castInt = (value, size) => { + switch (size) { + case 32: + return Int32Array.of(value)[0]; + case 16: + return Int16Array.of(value)[0]; + case 8: + return Int8Array.of(value)[0]; + } +}; +const expectNonNull = (value, location) => { + if (value === null || value === undefined) { + if (location) { + throw new TypeError(`Expected a non-null value for ${location}`); } + throw new TypeError("Expected a non-null value"); + } + return value; +}; +exports.expectNonNull = expectNonNull; +const expectObject = (value) => { + if (value === null || value === undefined) { + return undefined; + } + if (typeof value === "object" && !Array.isArray(value)) { + return value; + } + const receivedType = Array.isArray(value) ? 
"array" : typeof value; + throw new TypeError(`Expected object, got ${receivedType}: ${value}`); +}; +exports.expectObject = expectObject; +const expectString = (value) => { + if (value === null || value === undefined) { + return undefined; } + if (typeof value === "string") { + return value; + } + if (["boolean", "number", "bigint"].includes(typeof value)) { + exports.logger.warn(stackTraceWarning(`Expected string, got ${typeof value}: ${value}`)); + return String(value); + } + throw new TypeError(`Expected string, got ${typeof value}: ${value}`); +}; +exports.expectString = expectString; +const expectUnion = (value) => { + if (value === null || value === undefined) { + return undefined; + } + const asObject = (0, exports.expectObject)(value); + const setKeys = Object.entries(asObject) + .filter(([, v]) => v != null) + .map(([k]) => k); + if (setKeys.length === 0) { + throw new TypeError(`Unions must have exactly one non-null member. None were found.`); + } + if (setKeys.length > 1) { + throw new TypeError(`Unions must have exactly one non-null member. Keys ${setKeys} were not null.`); + } + return asObject; +}; +exports.expectUnion = expectUnion; +const strictParseDouble = (value) => { + if (typeof value == "string") { + return (0, exports.expectNumber)(parseNumber(value)); + } + return (0, exports.expectNumber)(value); +}; +exports.strictParseDouble = strictParseDouble; +exports.strictParseFloat = exports.strictParseDouble; +const strictParseFloat32 = (value) => { + if (typeof value == "string") { + return (0, exports.expectFloat32)(parseNumber(value)); + } + return (0, exports.expectFloat32)(value); +}; +exports.strictParseFloat32 = strictParseFloat32; +const NUMBER_REGEX = /(-?(?:0|[1-9]\d*)(?:\.\d+)?(?:[eE][+-]?\d+)?)|(-?Infinity)|(NaN)/g; +const parseNumber = (value) => { + const matches = value.match(NUMBER_REGEX); + if (matches === null || matches[0].length !== value.length) { + throw new TypeError(`Expected real number, got implicit NaN`); + } + return parseFloat(value); +}; +const limitedParseDouble = (value) => { + if (typeof value == "string") { + return parseFloatString(value); + } + return (0, exports.expectNumber)(value); +}; +exports.limitedParseDouble = limitedParseDouble; +exports.handleFloat = exports.limitedParseDouble; +exports.limitedParseFloat = exports.limitedParseDouble; +const limitedParseFloat32 = (value) => { + if (typeof value == "string") { + return parseFloatString(value); + } + return (0, exports.expectFloat32)(value); +}; +exports.limitedParseFloat32 = limitedParseFloat32; +const parseFloatString = (value) => { + switch (value) { + case "NaN": + return NaN; + case "Infinity": + return Infinity; + case "-Infinity": + return -Infinity; + default: + throw new Error(`Unable to parse float value: ${value}`); + } +}; +const strictParseLong = (value) => { + if (typeof value === "string") { + return (0, exports.expectLong)(parseNumber(value)); + } + return (0, exports.expectLong)(value); +}; +exports.strictParseLong = strictParseLong; +exports.strictParseInt = exports.strictParseLong; +const strictParseInt32 = (value) => { + if (typeof value === "string") { + return (0, exports.expectInt32)(parseNumber(value)); + } + return (0, exports.expectInt32)(value); +}; +exports.strictParseInt32 = strictParseInt32; +const strictParseShort = (value) => { + if (typeof value === "string") { + return (0, exports.expectShort)(parseNumber(value)); + } + return (0, exports.expectShort)(value); +}; +exports.strictParseShort = strictParseShort; +const strictParseByte = (value) => { 
+ if (typeof value === "string") { + return (0, exports.expectByte)(parseNumber(value)); + } + return (0, exports.expectByte)(value); +}; +exports.strictParseByte = strictParseByte; +const stackTraceWarning = (message) => { + return String(new TypeError(message).stack || message) + .split("\n") + .slice(0, 5) + .filter((s) => !s.includes("stackTraceWarning")) + .join("\n"); +}; +exports.logger = { + warn: console.warn, +}; - var output = {}; - output[eventType.value] = result; - return output; -} -function parseError(message) { - var errorCode = message.headers[':error-code']; - var errorMessage = message.headers[':error-message']; - var error = new Error(errorMessage.value || errorMessage); - error.code = error.name = errorCode.value || errorCode; - return error; -} +/***/ }), -/** - * @api private - */ -module.exports = { - parseEvent: parseEvent +/***/ 15342: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolvedPath = void 0; +const extended_encode_uri_component_1 = __nccwpck_require__(76016); +const resolvedPath = (resolvedPath, input, memberName, labelValueProvider, uriLabel, isGreedyLabel) => { + if (input != null && input[memberName] !== undefined) { + const labelValue = labelValueProvider(); + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: " + memberName + "."); + } + resolvedPath = resolvedPath.replace(uriLabel, isGreedyLabel + ? labelValue + .split("/") + .map((segment) => (0, extended_encode_uri_component_1.extendedEncodeURIComponent)(segment)) + .join("/") + : (0, extended_encode_uri_component_1.extendedEncodeURIComponent)(labelValue)); + } + else { + throw new Error("No value provided for input HTTP label: " + memberName + "."); + } + return resolvedPath; }; +exports.resolvedPath = resolvedPath; /***/ }), -/***/ 30866: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 59796: +/***/ ((__unused_webpack_module, exports) => { -var Int64 = (__nccwpck_require__(48583).Int64); +"use strict"; -var splitMessage = (__nccwpck_require__(71765).splitMessage); +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.serializeFloat = void 0; +const serializeFloat = (value) => { + if (value !== value) { + return "NaN"; + } + switch (value) { + case Infinity: + return "Infinity"; + case -Infinity: + return "-Infinity"; + default: + return value; + } +}; +exports.serializeFloat = serializeFloat; -var BOOLEAN_TAG = 'boolean'; -var BYTE_TAG = 'byte'; -var SHORT_TAG = 'short'; -var INT_TAG = 'integer'; -var LONG_TAG = 'long'; -var BINARY_TAG = 'binary'; -var STRING_TAG = 'string'; -var TIMESTAMP_TAG = 'timestamp'; -var UUID_TAG = 'uuid'; -/** - * @api private - * - * @param {Buffer} headers - */ -function parseHeaders(headers) { - var out = {}; - var position = 0; - while (position < headers.length) { - var nameLength = headers.readUInt8(position++); - var name = headers.slice(position, position + nameLength).toString(); - position += nameLength; - switch (headers.readUInt8(position++)) { - case 0 /* boolTrue */: - out[name] = { - type: BOOLEAN_TAG, - value: true - }; - break; - case 1 /* boolFalse */: - out[name] = { - type: BOOLEAN_TAG, - value: false - }; - break; - case 2 /* byte */: - out[name] = { - type: BYTE_TAG, - value: headers.readInt8(position++) - }; - break; - case 3 /* short */: - out[name] = { - type: SHORT_TAG, - value: headers.readInt16BE(position) - }; - position += 2; - 
break; - case 4 /* integer */: - out[name] = { - type: INT_TAG, - value: headers.readInt32BE(position) - }; - position += 4; - break; - case 5 /* long */: - out[name] = { - type: LONG_TAG, - value: new Int64(headers.slice(position, position + 8)) - }; - position += 8; - break; - case 6 /* byteArray */: - var binaryLength = headers.readUInt16BE(position); - position += 2; - out[name] = { - type: BINARY_TAG, - value: headers.slice(position, position + binaryLength) - }; - position += binaryLength; - break; - case 7 /* string */: - var stringLength = headers.readUInt16BE(position); - position += 2; - out[name] = { - type: STRING_TAG, - value: headers.slice( - position, - position + stringLength - ).toString() - }; - position += stringLength; - break; - case 8 /* timestamp */: - out[name] = { - type: TIMESTAMP_TAG, - value: new Date( - new Int64(headers.slice(position, position + 8)) - .valueOf() - ) - }; - position += 8; - break; - case 9 /* uuid */: - var uuidChars = headers.slice(position, position + 16) - .toString('hex'); - position += 16; - out[name] = { - type: UUID_TAG, - value: uuidChars.substr(0, 8) + '-' + - uuidChars.substr(8, 4) + '-' + - uuidChars.substr(12, 4) + '-' + - uuidChars.substr(16, 4) + '-' + - uuidChars.substr(20) - }; - break; - default: - throw new Error('Unrecognized header type tag'); - } - } - return out; -} +/***/ }), -function parseMessage(message) { - var parsed = splitMessage(message); - return { headers: parseHeaders(parsed.headers), body: parsed.body }; -} +/***/ 1752: +/***/ ((__unused_webpack_module, exports) => { -/** - * @api private - */ -module.exports = { - parseMessage: parseMessage +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports._json = void 0; +const _json = (obj) => { + if (obj == null) { + return {}; + } + if (Array.isArray(obj)) { + return obj.filter((_) => _ != null); + } + if (typeof obj === "object") { + const target = {}; + for (const key of Object.keys(obj)) { + if (obj[key] == null) { + continue; + } + target[key] = (0, exports._json)(obj[key]); + } + return target; + } + return obj; }; +exports._json = _json; /***/ }), -/***/ 71765: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var util = (__nccwpck_require__(28437).util); -var toBuffer = util.buffer.toBuffer; - -// All prelude components are unsigned, 32-bit integers -var PRELUDE_MEMBER_LENGTH = 4; -// The prelude consists of two components -var PRELUDE_LENGTH = PRELUDE_MEMBER_LENGTH * 2; -// Checksums are always CRC32 hashes. 
-var CHECKSUM_LENGTH = 4; -// Messages must include a full prelude, a prelude checksum, and a message checksum -var MINIMUM_MESSAGE_LENGTH = PRELUDE_LENGTH + CHECKSUM_LENGTH * 2; +/***/ 92480: +/***/ ((__unused_webpack_module, exports) => { -/** - * @api private - * - * @param {Buffer} message - */ -function splitMessage(message) { - if (!util.Buffer.isBuffer(message)) message = toBuffer(message); +"use strict"; - if (message.length < MINIMUM_MESSAGE_LENGTH) { - throw new Error('Provided message too short to accommodate event stream message overhead'); +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.splitEvery = void 0; +function splitEvery(value, delimiter, numDelimiters) { + if (numDelimiters <= 0 || !Number.isInteger(numDelimiters)) { + throw new Error("Invalid number of delimiters (" + numDelimiters + ") for splitEvery."); } - - if (message.length !== message.readUInt32BE(0)) { - throw new Error('Reported message length does not match received message length'); + const segments = value.split(delimiter); + if (numDelimiters === 1) { + return segments; + } + const compoundSegments = []; + let currentSegment = ""; + for (let i = 0; i < segments.length; i++) { + if (currentSegment === "") { + currentSegment = segments[i]; + } + else { + currentSegment += delimiter + segments[i]; + } + if ((i + 1) % numDelimiters === 0) { + compoundSegments.push(currentSegment); + currentSegment = ""; + } } + if (currentSegment !== "") { + compoundSegments.push(currentSegment); + } + return compoundSegments; +} +exports.splitEvery = splitEvery; - var expectedPreludeChecksum = message.readUInt32BE(PRELUDE_LENGTH); - if ( - expectedPreludeChecksum !== util.crypto.crc32( - message.slice(0, PRELUDE_LENGTH) - ) - ) { - throw new Error( - 'The prelude checksum specified in the message (' + - expectedPreludeChecksum + - ') does not match the calculated CRC32 checksum.' - ); - } +/***/ }), - var expectedMessageChecksum = message.readUInt32BE(message.length - CHECKSUM_LENGTH); +/***/ 74075: +/***/ ((__unused_webpack_module, exports) => { - if ( - expectedMessageChecksum !== util.crypto.crc32( - message.slice(0, message.length - CHECKSUM_LENGTH) - ) - ) { - throw new Error( - 'The message checksum did not match the expected value of ' + - expectedMessageChecksum - ); - } +"use strict"; - var headersStart = PRELUDE_LENGTH + CHECKSUM_LENGTH; - var headersEnd = headersStart + message.readUInt32BE(PRELUDE_MEMBER_LENGTH); +Object.defineProperty(exports, "__esModule", ({ value: true })); - return { - headers: message.slice(headersStart, headersEnd), - body: message.slice(headersEnd, message.length - CHECKSUM_LENGTH), - }; -} -/** - * @api private - */ -module.exports = { - splitMessage: splitMessage -}; +/***/ }), +/***/ 48960: +/***/ ((__unused_webpack_module, exports) => { -/***/ }), +"use strict"; -/***/ 69643: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.HttpAuthLocation = void 0; +var HttpAuthLocation; +(function (HttpAuthLocation) { + HttpAuthLocation["HEADER"] = "header"; + HttpAuthLocation["QUERY"] = "query"; +})(HttpAuthLocation = exports.HttpAuthLocation || (exports.HttpAuthLocation = {})); -/** - * What is necessary to create an event stream in node? 
- * - http response stream - * - parser - * - event stream model - */ -var EventMessageChunkerStream = (__nccwpck_require__(18518).EventMessageChunkerStream); -var EventUnmarshallerStream = (__nccwpck_require__(93773).EventUnmarshallerStream); +/***/ }), -function createEventStream(stream, parser, model) { - var eventStream = new EventUnmarshallerStream({ - parser: parser, - eventStreamModel: model - }); +/***/ 63274: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; - var eventMessageChunker = new EventMessageChunkerStream(); +Object.defineProperty(exports, "__esModule", ({ value: true })); - stream.pipe( - eventMessageChunker - ).pipe(eventStream); - stream.on('error', function(err) { - eventMessageChunker.emit('error', err); - }); +/***/ }), - eventMessageChunker.on('error', function(err) { - eventStream.emit('error', err); - }); +/***/ 78340: +/***/ ((__unused_webpack_module, exports) => { - return eventStream; -} +"use strict"; -/** - * @api private - */ -module.exports = { - createEventStream: createEventStream -}; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 54995: -/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 4744: +/***/ ((__unused_webpack_module, exports) => { -var AWS = __nccwpck_require__(28437); -var SequentialExecutor = __nccwpck_require__(55948); -var DISCOVER_ENDPOINT = (__nccwpck_require__(45313).discoverEndpoint); -/** - * The namespace used to register global event listeners for request building - * and sending. - */ -AWS.EventListeners = { - /** - * @!attribute VALIDATE_CREDENTIALS - * A request listener that validates whether the request is being - * sent with credentials. - * Handles the {AWS.Request~validate 'validate' Request event} - * @example Sending a request without validating credentials - * var listener = AWS.EventListeners.Core.VALIDATE_CREDENTIALS; - * request.removeListener('validate', listener); - * @readonly - * @return [Function] - * @!attribute VALIDATE_REGION - * A request listener that validates whether the region is set - * for a request. - * Handles the {AWS.Request~validate 'validate' Request event} - * @example Sending a request without validating region configuration - * var listener = AWS.EventListeners.Core.VALIDATE_REGION; - * request.removeListener('validate', listener); - * @readonly - * @return [Function] - * @!attribute VALIDATE_PARAMETERS - * A request listener that validates input parameters in a request. - * Handles the {AWS.Request~validate 'validate' Request event} - * @example Sending a request without validating parameters - * var listener = AWS.EventListeners.Core.VALIDATE_PARAMETERS; - * request.removeListener('validate', listener); - * @example Disable parameter validation globally - * AWS.EventListeners.Core.removeListener('validate', - * AWS.EventListeners.Core.VALIDATE_REGION); - * @readonly - * @return [Function] - * @!attribute SEND - * A request listener that initiates the HTTP connection for a - * request being sent. Handles the {AWS.Request~send 'send' Request event} - * @example Replacing the HTTP handler - * var listener = AWS.EventListeners.Core.SEND; - * request.removeListener('send', listener); - * request.on('send', function(response) { - * customHandler.send(response); - * }); - * @return [Function] - * @readonly - * @!attribute HTTP_DATA - * A request listener that reads data from the HTTP connection in order - * to build the response data. - * Handles the {AWS.Request~httpData 'httpData' Request event}. 
- * Remove this handler if you are overriding the 'httpData' event and - * do not want extra data processing and buffering overhead. - * @example Disabling default data processing - * var listener = AWS.EventListeners.Core.HTTP_DATA; - * request.removeListener('httpData', listener); - * @return [Function] - * @readonly - */ - Core: {} /* doc hack */ -}; +"use strict"; -/** - * @api private - */ -function getOperationAuthtype(req) { - if (!req.service.api.operations) { - return ''; - } - var operation = req.service.api.operations[req.operation]; - return operation ? operation.authtype : ''; -} +Object.defineProperty(exports, "__esModule", ({ value: true })); -/** - * @api private - */ -function getIdentityType(req) { - var service = req.service; - if (service.config.signatureVersion) { - return service.config.signatureVersion; - } +/***/ }), - if (service.api.signatureVersion) { - return service.api.signatureVersion; - } +/***/ 68270: +/***/ ((__unused_webpack_module, exports) => { - return getOperationAuthtype(req); -} +"use strict"; -AWS.EventListeners = { - Core: new SequentialExecutor().addNamedListeners(function(add, addAsync) { - addAsync( - 'VALIDATE_CREDENTIALS', 'validate', - function VALIDATE_CREDENTIALS(req, done) { - if (!req.service.api.signatureVersion && !req.service.config.signatureVersion) return done(); // none +Object.defineProperty(exports, "__esModule", ({ value: true })); - var identityType = getIdentityType(req); - if (identityType === 'bearer') { - req.service.config.getToken(function(err) { - if (err) { - req.response.error = AWS.util.error(err, {code: 'TokenError'}); - } - done(); - }); - return; - } - req.service.config.getCredentials(function(err) { - if (err) { - req.response.error = AWS.util.error(err, - { - code: 'CredentialsError', - message: 'Missing credentials in config, if using AWS_CONFIG_FILE, set AWS_SDK_LOAD_CONFIG=1' - } - ); - } - done(); - }); - }); +/***/ }), - add('VALIDATE_REGION', 'validate', function VALIDATE_REGION(req) { - if (!req.service.isGlobalEndpoint) { - var dnsHostRegex = new RegExp(/^([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9-]{0,61}[a-zA-Z0-9])$/); - if (!req.service.config.region) { - req.response.error = AWS.util.error(new Error(), - {code: 'ConfigError', message: 'Missing region in config'}); - } else if (!dnsHostRegex.test(req.service.config.region)) { - req.response.error = AWS.util.error(new Error(), - {code: 'ConfigError', message: 'Invalid region in config'}); - } - } - }); +/***/ 39580: +/***/ ((__unused_webpack_module, exports) => { - add('BUILD_IDEMPOTENCY_TOKENS', 'validate', function BUILD_IDEMPOTENCY_TOKENS(req) { - if (!req.service.api.operations) { - return; - } - var operation = req.service.api.operations[req.operation]; - if (!operation) { - return; - } - var idempotentMembers = operation.idempotentMembers; - if (!idempotentMembers.length) { - return; - } - // creates a copy of params so user's param object isn't mutated - var params = AWS.util.copy(req.params); - for (var i = 0, iLen = idempotentMembers.length; i < iLen; i++) { - if (!params[idempotentMembers[i]]) { - // add the member - params[idempotentMembers[i]] = AWS.util.uuid.v4(); - } - } - req.params = params; - }); +"use strict"; - add('VALIDATE_PARAMETERS', 'validate', function VALIDATE_PARAMETERS(req) { - if (!req.service.api.operations) { - return; - } - var rules = req.service.api.operations[req.operation].input; - var validation = req.service.config.paramValidation; - new AWS.ParamValidator(validation).validate(rules, req.params); - }); 
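// [Editor's note: illustrative sketch only; not part of this patch or of the bundled output.]
// The added hunks in this file vendor @smithy-style runtime helpers (Client, date-utils,
// parse-utils, split-every, lazy-json; see the re-exports in module 63570 above), replacing
// the removed SDK v2 request and event-stream plumbing. A minimal usage sketch, assuming the
// same helpers are consumed from the published @smithy/smithy-client package rather than from
// this bundle (the package name and export shape are assumptions, not shown in the diff):
const {
  parseRfc3339DateTime, // RFC-3339 string -> Date (module 21737 above)
  dateToUtcString,      // Date -> IMF-fixdate string (module 21737 above)
  splitEvery,           // split a string on every Nth delimiter (module 92480 above)
  LazyJsonString,       // defers JSON.parse of embedded JSON documents (module 32964 above)
} = require("@smithy/smithy-client");

const when = parseRfc3339DateTime("2023-08-23T16:35:15Z");
console.log(dateToUtcString(when));         // "Wed, 23 Aug 2023 16:35:15 GMT"
console.log(splitEvery("a,b,c,d", ",", 2)); // ["a,b", "c,d"]
console.log(LazyJsonString.fromObject({ ok: true }).deserializeJSON()); // { ok: true }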
+Object.defineProperty(exports, "__esModule", ({ value: true })); - add('COMPUTE_CHECKSUM', 'afterBuild', function COMPUTE_CHECKSUM(req) { - if (!req.service.api.operations) { - return; - } - var operation = req.service.api.operations[req.operation]; - if (!operation) { - return; - } - var body = req.httpRequest.body; - var isNonStreamingPayload = body && (AWS.util.Buffer.isBuffer(body) || typeof body === 'string'); - var headers = req.httpRequest.headers; - if ( - operation.httpChecksumRequired && - req.service.config.computeChecksums && - isNonStreamingPayload && - !headers['Content-MD5'] - ) { - var md5 = AWS.util.crypto.md5(body, 'base64'); - headers['Content-MD5'] = md5; - } - }); - addAsync('COMPUTE_SHA256', 'afterBuild', function COMPUTE_SHA256(req, done) { - req.haltHandlersOnError(); - if (!req.service.api.operations) { - return; - } - var operation = req.service.api.operations[req.operation]; - var authtype = operation ? operation.authtype : ''; - if (!req.service.api.signatureVersion && !authtype && !req.service.config.signatureVersion) return done(); // none - if (req.service.getSignerClass(req) === AWS.Signers.V4) { - var body = req.httpRequest.body || ''; - if (authtype.indexOf('unsigned-body') >= 0) { - req.httpRequest.headers['X-Amz-Content-Sha256'] = 'UNSIGNED-PAYLOAD'; - return done(); - } - AWS.util.computeSha256(body, function(err, sha) { - if (err) { - done(err); - } - else { - req.httpRequest.headers['X-Amz-Content-Sha256'] = sha; - done(); - } - }); - } else { - done(); - } - }); +/***/ }), - add('SET_CONTENT_LENGTH', 'afterBuild', function SET_CONTENT_LENGTH(req) { - var authtype = getOperationAuthtype(req); - var payloadMember = AWS.util.getRequestPayloadShape(req); - if (req.httpRequest.headers['Content-Length'] === undefined) { - try { - var length = AWS.util.string.byteLength(req.httpRequest.body); - req.httpRequest.headers['Content-Length'] = length; - } catch (err) { - if (payloadMember && payloadMember.isStreaming) { - if (payloadMember.requiresLength) { - //streaming payload requires length(s3, glacier) - throw err; - } else if (authtype.indexOf('unsigned-body') >= 0) { - //unbounded streaming payload(lex, mediastore) - req.httpRequest.headers['Transfer-Encoding'] = 'chunked'; - return; - } else { - throw err; - } - } - throw err; - } - } - }); +/***/ 57628: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - add('SET_HTTP_HOST', 'afterBuild', function SET_HTTP_HOST(req) { - req.httpRequest.headers['Host'] = req.httpRequest.endpoint.host; - }); +"use strict"; - add('SET_TRACE_ID', 'afterBuild', function SET_TRACE_ID(req) { - var traceIdHeaderName = 'X-Amzn-Trace-Id'; - if (AWS.util.isNode() && !Object.hasOwnProperty.call(req.httpRequest.headers, traceIdHeaderName)) { - var ENV_LAMBDA_FUNCTION_NAME = 'AWS_LAMBDA_FUNCTION_NAME'; - var ENV_TRACE_ID = '_X_AMZN_TRACE_ID'; - var functionName = process.env[ENV_LAMBDA_FUNCTION_NAME]; - var traceId = process.env[ENV_TRACE_ID]; - if ( - typeof functionName === 'string' && - functionName.length > 0 && - typeof traceId === 'string' && - traceId.length > 0 - ) { - req.httpRequest.headers[traceIdHeaderName] = traceId; - } - } - }); +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(39580), exports); +tslib_1.__exportStar(__nccwpck_require__(98398), exports); +tslib_1.__exportStar(__nccwpck_require__(76522), exports); - add('RESTART', 'restart', function RESTART() { - var err = this.response.error; - if 
(!err || !err.retryable) return; - this.httpRequest = new AWS.HttpRequest( - this.service.endpoint, - this.service.region - ); +/***/ }), - if (this.response.retryCount < this.service.config.maxRetries) { - this.response.retryCount++; - } else { - this.response.error = null; - } - }); +/***/ 98398: +/***/ ((__unused_webpack_module, exports) => { - var addToHead = true; - addAsync('DISCOVER_ENDPOINT', 'sign', DISCOVER_ENDPOINT, addToHead); +"use strict"; - addAsync('SIGN', 'sign', function SIGN(req, done) { - var service = req.service; - var identityType = getIdentityType(req); - if (!identityType || identityType.length === 0) return done(); // none +Object.defineProperty(exports, "__esModule", ({ value: true })); - if (identityType === 'bearer') { - service.config.getToken(function (err, token) { - if (err) { - req.response.error = err; - return done(); - } - try { - var SignerClass = service.getSignerClass(req); - var signer = new SignerClass(req.httpRequest); - signer.addAuthorization(token); - } catch (e) { - req.response.error = e; - } - done(); - }); - } else { - service.config.getCredentials(function (err, credentials) { - if (err) { - req.response.error = err; - return done(); - } +/***/ }), - try { - var date = service.getSkewCorrectedDate(); - var SignerClass = service.getSignerClass(req); - var operations = req.service.api.operations || {}; - var operation = operations[req.operation]; - var signer = new SignerClass(req.httpRequest, - service.getSigningName(req), - { - signatureCache: service.config.signatureCache, - operation: operation, - signatureVersion: service.api.signatureVersion - }); - signer.setServiceClientId(service._clientId); - - // clear old authorization headers - delete req.httpRequest.headers['Authorization']; - delete req.httpRequest.headers['Date']; - delete req.httpRequest.headers['X-Amz-Date']; - - // add new authorization - signer.addAuthorization(credentials, date); - req.signedAt = date; - } catch (e) { - req.response.error = e; - } - done(); - }); +/***/ 76522: +/***/ ((__unused_webpack_module, exports) => { - } - }); +"use strict"; - add('VALIDATE_RESPONSE', 'validateResponse', function VALIDATE_RESPONSE(resp) { - if (this.service.successfulResponse(resp, this)) { - resp.data = {}; - resp.error = null; - } else { - resp.data = null; - resp.error = AWS.util.error(new Error(), - {code: 'UnknownError', message: 'An unknown error occurred.'}); - } - }); +Object.defineProperty(exports, "__esModule", ({ value: true })); - add('ERROR', 'error', function ERROR(err, resp) { - var awsQueryCompatible = resp.request.service.api.awsQueryCompatible; - if (awsQueryCompatible) { - var headers = resp.httpResponse.headers; - var queryErrorCode = headers ? 
headers['x-amzn-query-error'] : undefined; - if (queryErrorCode && queryErrorCode.includes(';')) { - resp.error.code = queryErrorCode.split(';')[0]; - } - } - }, true); - - addAsync('SEND', 'send', function SEND(resp, done) { - resp.httpResponse._abortCallback = done; - resp.error = null; - resp.data = null; - - function callback(httpResp) { - resp.httpResponse.stream = httpResp; - var stream = resp.request.httpRequest.stream; - var service = resp.request.service; - var api = service.api; - var operationName = resp.request.operation; - var operation = api.operations[operationName] || {}; - - httpResp.on('headers', function onHeaders(statusCode, headers, statusMessage) { - resp.request.emit( - 'httpHeaders', - [statusCode, headers, resp, statusMessage] - ); - - if (!resp.httpResponse.streaming) { - if (AWS.HttpClient.streamsApiVersion === 2) { // streams2 API check - // if we detect event streams, we're going to have to - // return the stream immediately - if (operation.hasEventOutput && service.successfulResponse(resp)) { - // skip reading the IncomingStream - resp.request.emit('httpDone'); - done(); - return; - } - httpResp.on('readable', function onReadable() { - var data = httpResp.read(); - if (data !== null) { - resp.request.emit('httpData', [data, resp]); - } - }); - } else { // legacy streams API - httpResp.on('data', function onData(data) { - resp.request.emit('httpData', [data, resp]); - }); - } - } - }); +/***/ }), - httpResp.on('end', function onEnd() { - if (!stream || !stream.didCallback) { - if (AWS.HttpClient.streamsApiVersion === 2 && (operation.hasEventOutput && service.successfulResponse(resp))) { - // don't concatenate response chunks when streaming event stream data when response is successful - return; - } - resp.request.emit('httpDone'); - done(); - } - }); - } +/***/ 89035: +/***/ ((__unused_webpack_module, exports) => { - function progress(httpResp) { - httpResp.on('sendProgress', function onSendProgress(value) { - resp.request.emit('httpUploadProgress', [value, resp]); - }); +"use strict"; - httpResp.on('receiveProgress', function onReceiveProgress(value) { - resp.request.emit('httpDownloadProgress', [value, resp]); - }); - } +Object.defineProperty(exports, "__esModule", ({ value: true })); - function error(err) { - if (err.code !== 'RequestAbortedError') { - var errCode = err.code === 'TimeoutError' ? 
err.code : 'NetworkingError'; - err = AWS.util.error(err, { - code: errCode, - region: resp.request.httpRequest.region, - hostname: resp.request.httpRequest.endpoint.hostname, - retryable: true - }); - } - resp.error = err; - resp.request.emit('httpError', [resp.error, resp], function() { - done(); - }); - } - function executeSend() { - var http = AWS.HttpClient.getInstance(); - var httpOptions = resp.request.service.config.httpOptions || {}; - try { - var stream = http.handleRequest(resp.request.httpRequest, httpOptions, - callback, error); - progress(stream); - } catch (err) { - error(err); - } - } - var timeDiff = (resp.request.service.getSkewCorrectedDate() - this.signedAt) / 1000; - if (timeDiff >= 60 * 10) { // if we signed 10min ago, re-sign - this.emit('sign', [this], function(err) { - if (err) done(err); - else executeSend(); - }); - } else { - executeSend(); - } - }); +/***/ }), - add('HTTP_HEADERS', 'httpHeaders', - function HTTP_HEADERS(statusCode, headers, resp, statusMessage) { - resp.httpResponse.statusCode = statusCode; - resp.httpResponse.statusMessage = statusMessage; - resp.httpResponse.headers = headers; - resp.httpResponse.body = AWS.util.buffer.toBuffer(''); - resp.httpResponse.buffers = []; - resp.httpResponse.numBytes = 0; - var dateHeader = headers.date || headers.Date; - var service = resp.request.service; - if (dateHeader) { - var serverTime = Date.parse(dateHeader); - if (service.config.correctClockSkew - && service.isClockSkewed(serverTime)) { - service.applyClockOffset(serverTime); - } - } - }); +/***/ 7225: +/***/ ((__unused_webpack_module, exports) => { - add('HTTP_DATA', 'httpData', function HTTP_DATA(chunk, resp) { - if (chunk) { - if (AWS.util.isNode()) { - resp.httpResponse.numBytes += chunk.length; +"use strict"; - var total = resp.httpResponse.headers['content-length']; - var progress = { loaded: resp.httpResponse.numBytes, total: total }; - resp.request.emit('httpDownloadProgress', [progress, resp]); - } +Object.defineProperty(exports, "__esModule", ({ value: true })); - resp.httpResponse.buffers.push(AWS.util.buffer.toBuffer(chunk)); - } - }); - add('HTTP_DONE', 'httpDone', function HTTP_DONE(resp) { - // convert buffers array into single buffer - if (resp.httpResponse.buffers && resp.httpResponse.buffers.length > 0) { - var body = AWS.util.buffer.concat(resp.httpResponse.buffers); - resp.httpResponse.body = body; - } - delete resp.httpResponse.numBytes; - delete resp.httpResponse.buffers; - }); +/***/ }), - add('FINALIZE_ERROR', 'retry', function FINALIZE_ERROR(resp) { - if (resp.httpResponse.statusCode) { - resp.error.statusCode = resp.httpResponse.statusCode; - if (resp.error.retryable === undefined) { - resp.error.retryable = this.service.retryableError(resp.error, this); - } - } - }); +/***/ 54126: +/***/ ((__unused_webpack_module, exports) => { - add('INVALIDATE_CREDENTIALS', 'retry', function INVALIDATE_CREDENTIALS(resp) { - if (!resp.error) return; - switch (resp.error.code) { - case 'RequestExpired': // EC2 only - case 'ExpiredTokenException': - case 'ExpiredToken': - resp.error.retryable = true; - resp.request.service.config.credentials.expired = true; - } - }); +"use strict"; - add('EXPIRED_SIGNATURE', 'retry', function EXPIRED_SIGNATURE(resp) { - var err = resp.error; - if (!err) return; - if (typeof err.code === 'string' && typeof err.message === 'string') { - if (err.code.match(/Signature/) && err.message.match(/expired/)) { - resp.error.retryable = true; - } - } - }); +Object.defineProperty(exports, "__esModule", ({ value: true })); 
+exports.EndpointURLScheme = void 0; +var EndpointURLScheme; +(function (EndpointURLScheme) { + EndpointURLScheme["HTTP"] = "http"; + EndpointURLScheme["HTTPS"] = "https"; +})(EndpointURLScheme = exports.EndpointURLScheme || (exports.EndpointURLScheme = {})); - add('CLOCK_SKEWED', 'retry', function CLOCK_SKEWED(resp) { - if (!resp.error) return; - if (this.service.clockSkewError(resp.error) - && this.service.config.correctClockSkew) { - resp.error.retryable = true; - } - }); - add('REDIRECT', 'retry', function REDIRECT(resp) { - if (resp.error && resp.error.statusCode >= 300 && - resp.error.statusCode < 400 && resp.httpResponse.headers['location']) { - this.httpRequest.endpoint = - new AWS.Endpoint(resp.httpResponse.headers['location']); - this.httpRequest.headers['Host'] = this.httpRequest.endpoint.host; - resp.error.redirect = true; - resp.error.retryable = true; - } - }); +/***/ }), - add('RETRY_CHECK', 'retry', function RETRY_CHECK(resp) { - if (resp.error) { - if (resp.error.redirect && resp.redirectCount < resp.maxRedirects) { - resp.error.retryDelay = 0; - } else if (resp.retryCount < resp.maxRetries) { - resp.error.retryDelay = this.service.retryDelays(resp.retryCount, resp.error) || 0; - } - } - }); +/***/ 55612: +/***/ ((__unused_webpack_module, exports) => { - addAsync('RESET_RETRY_STATE', 'afterRetry', function RESET_RETRY_STATE(resp, done) { - var delay, willRetry = false; +"use strict"; - if (resp.error) { - delay = resp.error.retryDelay || 0; - if (resp.error.retryable && resp.retryCount < resp.maxRetries) { - resp.retryCount++; - willRetry = true; - } else if (resp.error.redirect && resp.redirectCount < resp.maxRedirects) { - resp.redirectCount++; - willRetry = true; - } - } +Object.defineProperty(exports, "__esModule", ({ value: true })); - // delay < 0 is a signal from customBackoff to skip retries - if (willRetry && delay >= 0) { - resp.error = null; - setTimeout(done, delay); - } else { - done(); - } - }); - }), - - CorePost: new SequentialExecutor().addNamedListeners(function(add) { - add('EXTRACT_REQUEST_ID', 'extractData', AWS.util.extractRequestId); - add('EXTRACT_REQUEST_ID', 'extractError', AWS.util.extractRequestId); - - add('ENOTFOUND_ERROR', 'httpError', function ENOTFOUND_ERROR(err) { - function isDNSError(err) { - return err.errno === 'ENOTFOUND' || - typeof err.errno === 'number' && - typeof AWS.util.getSystemErrorName === 'function' && - ['EAI_NONAME', 'EAI_NODATA'].indexOf(AWS.util.getSystemErrorName(err.errno) >= 0); - } - if (err.code === 'NetworkingError' && isDNSError(err)) { - var message = 'Inaccessible host: `' + err.hostname + '\' at port `' + err.port + - '\'. 
This service may not be available in the `' + err.region + - '\' region.'; - this.response.error = AWS.util.error(new Error(message), { - code: 'UnknownEndpoint', - region: err.region, - hostname: err.hostname, - retryable: true, - originalError: err - }); - } - }); - }), - - Logger: new SequentialExecutor().addNamedListeners(function(add) { - add('LOG_REQUEST', 'complete', function LOG_REQUEST(resp) { - var req = resp.request; - var logger = req.service.config.logger; - if (!logger) return; - function filterSensitiveLog(inputShape, shape) { - if (!shape) { - return shape; - } - if (inputShape.isSensitive) { - return '***SensitiveInformation***'; - } - switch (inputShape.type) { - case 'structure': - var struct = {}; - AWS.util.each(shape, function(subShapeName, subShape) { - if (Object.prototype.hasOwnProperty.call(inputShape.members, subShapeName)) { - struct[subShapeName] = filterSensitiveLog(inputShape.members[subShapeName], subShape); - } else { - struct[subShapeName] = subShape; - } - }); - return struct; - case 'list': - var list = []; - AWS.util.arrayEach(shape, function(subShape, index) { - list.push(filterSensitiveLog(inputShape.member, subShape)); - }); - return list; - case 'map': - var map = {}; - AWS.util.each(shape, function(key, value) { - map[key] = filterSensitiveLog(inputShape.value, value); - }); - return map; - default: - return shape; - } - } - function buildMessage() { - var time = resp.request.service.getSkewCorrectedDate().getTime(); - var delta = (time - req.startTime.getTime()) / 1000; - var ansi = logger.isTTY ? true : false; - var status = resp.httpResponse.statusCode; - var censoredParams = req.params; - if ( - req.service.api.operations && - req.service.api.operations[req.operation] && - req.service.api.operations[req.operation].input - ) { - var inputShape = req.service.api.operations[req.operation].input; - censoredParams = filterSensitiveLog(inputShape, req.params); - } - var params = (__nccwpck_require__(73837).inspect)(censoredParams, true, null); - var message = ''; - if (ansi) message += '\x1B[33m'; - message += '[AWS ' + req.service.serviceIdentifier + ' ' + status; - message += ' ' + delta.toString() + 's ' + resp.retryCount + ' retries]'; - if (ansi) message += '\x1B[0;1m'; - message += ' ' + AWS.util.string.lowerFirst(req.operation); - message += '(' + params + ')'; - if (ansi) message += '\x1B[0m'; - return message; - } +/***/ }), - var line = buildMessage(); - if (typeof logger.log === 'function') { - logger.log(line); - } else if (typeof logger.write === 'function') { - logger.write(line + '\n'); - } - }); - }), +/***/ 43084: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); - Json: new SequentialExecutor().addNamedListeners(function(add) { - var svc = __nccwpck_require__(30083); - add('BUILD', 'build', svc.buildRequest); - add('EXTRACT_DATA', 'extractData', svc.extractData); - add('EXTRACT_ERROR', 'extractError', svc.extractError); - }), - Rest: new SequentialExecutor().addNamedListeners(function(add) { - var svc = __nccwpck_require__(98200); - add('BUILD', 'build', svc.buildRequest); - add('EXTRACT_DATA', 'extractData', svc.extractData); - add('EXTRACT_ERROR', 'extractError', svc.extractError); - }), +/***/ }), - RestJson: new SequentialExecutor().addNamedListeners(function(add) { - var svc = __nccwpck_require__(5883); - add('BUILD', 'build', svc.buildRequest); - add('EXTRACT_DATA', 'extractData', svc.extractData); - add('EXTRACT_ERROR', 'extractError', 
svc.extractError); - add('UNSET_CONTENT_LENGTH', 'afterBuild', svc.unsetContentLength); - }), +/***/ 89843: +/***/ ((__unused_webpack_module, exports) => { - RestXml: new SequentialExecutor().addNamedListeners(function(add) { - var svc = __nccwpck_require__(15143); - add('BUILD', 'build', svc.buildRequest); - add('EXTRACT_DATA', 'extractData', svc.extractData); - add('EXTRACT_ERROR', 'extractError', svc.extractError); - }), +"use strict"; - Query: new SequentialExecutor().addNamedListeners(function(add) { - var svc = __nccwpck_require__(90761); - add('BUILD', 'build', svc.buildRequest); - add('EXTRACT_DATA', 'extractData', svc.extractData); - add('EXTRACT_ERROR', 'extractError', svc.extractError); - }) -}; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 1556: -/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 63799: +/***/ ((__unused_webpack_module, exports) => { -var AWS = __nccwpck_require__(28437); -var inherit = AWS.util.inherit; +"use strict"; -/** - * The endpoint that a service will talk to, for example, - * `'https://ec2.ap-southeast-1.amazonaws.com'`. If - * you need to override an endpoint for a service, you can - * set the endpoint on a service by passing the endpoint - * object with the `endpoint` option key: - * - * ```javascript - * var ep = new AWS.Endpoint('awsproxy.example.com'); - * var s3 = new AWS.S3({endpoint: ep}); - * s3.service.endpoint.hostname == 'awsproxy.example.com' - * ``` - * - * Note that if you do not specify a protocol, the protocol will - * be selected based on your current {AWS.config} configuration. - * - * @!attribute protocol - * @return [String] the protocol (http or https) of the endpoint - * URL - * @!attribute hostname - * @return [String] the host portion of the endpoint, e.g., - * example.com - * @!attribute host - * @return [String] the host portion of the endpoint including - * the port, e.g., example.com:80 - * @!attribute port - * @return [Integer] the port of the endpoint - * @!attribute href - * @return [String] the full URL of the endpoint - */ -AWS.Endpoint = inherit({ +Object.defineProperty(exports, "__esModule", ({ value: true })); - /** - * @overload Endpoint(endpoint) - * Constructs a new endpoint given an endpoint URL. If the - * URL omits a protocol (http or https), the default protocol - * set in the global {AWS.config} will be used. - * @param endpoint [String] the URL to construct an endpoint from - */ - constructor: function Endpoint(endpoint, config) { - AWS.util.hideProperties(this, ['slashes', 'auth', 'hash', 'search', 'query']); - if (typeof endpoint === 'undefined' || endpoint === null) { - throw new Error('Invalid endpoint: ' + endpoint); - } else if (typeof endpoint !== 'string') { - return AWS.util.copy(endpoint); - } +/***/ }), - if (!endpoint.match(/^http/)) { - var useSSL = config && config.sslEnabled !== undefined ? - config.sslEnabled : AWS.config.sslEnabled; - endpoint = (useSSL ? 'https' : 'http') + '://' + endpoint; - } +/***/ 21550: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - AWS.util.update(this, AWS.util.urlParse(endpoint)); +"use strict"; - // Ensure the port property is set as an integer - if (this.port) { - this.port = parseInt(this.port, 10); - } else { - this.port = this.protocol === 'https:' ? 
443 : 80; - } - } +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(55612), exports); +tslib_1.__exportStar(__nccwpck_require__(43084), exports); +tslib_1.__exportStar(__nccwpck_require__(89843), exports); +tslib_1.__exportStar(__nccwpck_require__(57658), exports); +tslib_1.__exportStar(__nccwpck_require__(63799), exports); -}); -/** - * The low level HTTP request object, encapsulating all HTTP header - * and body data sent by a service request. - * - * @!attribute method - * @return [String] the HTTP method of the request - * @!attribute path - * @return [String] the path portion of the URI, e.g., - * "/list/?start=5&num=10" - * @!attribute headers - * @return [map] - * a map of header keys and their respective values - * @!attribute body - * @return [String] the request body payload - * @!attribute endpoint - * @return [AWS.Endpoint] the endpoint for the request - * @!attribute region - * @api private - * @return [String] the region, for signing purposes only. - */ -AWS.HttpRequest = inherit({ - - /** - * @api private - */ - constructor: function HttpRequest(endpoint, region) { - endpoint = new AWS.Endpoint(endpoint); - this.method = 'POST'; - this.path = endpoint.path || '/'; - this.headers = {}; - this.body = ''; - this.endpoint = endpoint; - this.region = region; - this._userAgent = ''; - this.setUserAgent(); - }, +/***/ }), - /** - * @api private - */ - setUserAgent: function setUserAgent() { - this._userAgent = this.headers[this.getUserAgentHeaderName()] = AWS.util.userAgent(); - }, +/***/ 57658: +/***/ ((__unused_webpack_module, exports) => { - getUserAgentHeaderName: function getUserAgentHeaderName() { - var prefix = AWS.util.isBrowser() ? 'X-Amz-' : ''; - return prefix + 'User-Agent'; - }, +"use strict"; - /** - * @api private - */ - appendToUserAgent: function appendToUserAgent(agentPartial) { - if (typeof agentPartial === 'string' && agentPartial) { - this._userAgent += ' ' + agentPartial; - } - this.headers[this.getUserAgentHeaderName()] = this._userAgent; - }, +Object.defineProperty(exports, "__esModule", ({ value: true })); - /** - * @api private - */ - getUserAgent: function getUserAgent() { - return this._userAgent; - }, - /** - * @return [String] the part of the {path} excluding the - * query string - */ - pathname: function pathname() { - return this.path.split('?', 1)[0]; - }, +/***/ }), - /** - * @return [String] the query string portion of the {path} - */ - search: function search() { - var query = this.path.split('?', 2)[1]; - if (query) { - query = AWS.util.queryStringParse(query); - return AWS.util.queryParamsToString(query); - } - return ''; - }, +/***/ 88508: +/***/ ((__unused_webpack_module, exports) => { - /** - * @api private - * update httpRequest endpoint with endpoint string - */ - updateEndpoint: function updateEndpoint(endpointStr) { - var newEndpoint = new AWS.Endpoint(endpointStr); - this.endpoint = newEndpoint; - this.path = newEndpoint.path || '/'; - if (this.headers['Host']) { - this.headers['Host'] = newEndpoint.host; - } - } -}); +"use strict"; -/** - * The low level HTTP response object, encapsulating all HTTP header - * and body data returned from the request. 
- * - * @!attribute statusCode - * @return [Integer] the HTTP status code of the response (e.g., 200, 404) - * @!attribute headers - * @return [map] - * a map of response header keys and their respective values - * @!attribute body - * @return [String] the response body payload - * @!attribute [r] streaming - * @return [Boolean] whether this response is being streamed at a low-level. - * Defaults to `false` (buffered reads). Do not modify this manually, use - * {createUnbufferedStream} to convert the stream to unbuffered mode - * instead. - */ -AWS.HttpResponse = inherit({ - - /** - * @api private - */ - constructor: function HttpResponse() { - this.statusCode = undefined; - this.headers = {}; - this.body = undefined; - this.streaming = false; - this.stream = null; - }, +Object.defineProperty(exports, "__esModule", ({ value: true })); - /** - * Disables buffering on the HTTP response and returns the stream for reading. - * @return [Stream, XMLHttpRequest, null] the underlying stream object. - * Use this object to directly read data off of the stream. - * @note This object is only available after the {AWS.Request~httpHeaders} - * event has fired. This method must be called prior to - * {AWS.Request~httpData}. - * @example Taking control of a stream - * request.on('httpHeaders', function(statusCode, headers) { - * if (statusCode < 300) { - * if (headers.etag === 'xyz') { - * // pipe the stream, disabling buffering - * var stream = this.response.httpResponse.createUnbufferedStream(); - * stream.pipe(process.stdout); - * } else { // abort this request and set a better error message - * this.abort(); - * this.response.error = new Error('Invalid ETag'); - * } - * } - * }).send(console.log); - */ - createUnbufferedStream: function createUnbufferedStream() { - this.streaming = true; - return this.stream; - } -}); +/***/ }), -AWS.HttpClient = inherit({}); +/***/ 8947: +/***/ ((__unused_webpack_module, exports) => { -/** - * @api private - */ -AWS.HttpClient.getInstance = function getInstance() { - if (this.singleton === undefined) { - this.singleton = new this(); - } - return this.singleton; +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveChecksumRuntimeConfig = exports.getChecksumConfiguration = exports.AlgorithmId = void 0; +var AlgorithmId; +(function (AlgorithmId) { + AlgorithmId["MD5"] = "md5"; + AlgorithmId["CRC32"] = "crc32"; + AlgorithmId["CRC32C"] = "crc32c"; + AlgorithmId["SHA1"] = "sha1"; + AlgorithmId["SHA256"] = "sha256"; +})(AlgorithmId = exports.AlgorithmId || (exports.AlgorithmId = {})); +const getChecksumConfiguration = (runtimeConfig) => { + const checksumAlgorithms = []; + if (runtimeConfig.sha256 !== undefined) { + checksumAlgorithms.push({ + algorithmId: () => AlgorithmId.SHA256, + checksumConstructor: () => runtimeConfig.sha256, + }); + } + if (runtimeConfig.md5 != undefined) { + checksumAlgorithms.push({ + algorithmId: () => AlgorithmId.MD5, + checksumConstructor: () => runtimeConfig.md5, + }); + } + return { + _checksumAlgorithms: checksumAlgorithms, + addChecksumAlgorithm(algo) { + this._checksumAlgorithms.push(algo); + }, + checksumAlgorithms() { + return this._checksumAlgorithms; + }, + }; +}; +exports.getChecksumConfiguration = getChecksumConfiguration; +const resolveChecksumRuntimeConfig = (clientConfig) => { + const runtimeConfig = {}; + clientConfig.checksumAlgorithms().forEach((checksumAlgorithm) => { + runtimeConfig[checksumAlgorithm.algorithmId()] = checksumAlgorithm.checksumConstructor(); + }); + return 
runtimeConfig; }; +exports.resolveChecksumRuntimeConfig = resolveChecksumRuntimeConfig; /***/ }), -/***/ 2310: -/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { - -var AWS = __nccwpck_require__(28437); -var Stream = AWS.util.stream.Stream; -var TransformStream = AWS.util.stream.Transform; -var ReadableStream = AWS.util.stream.Readable; -__nccwpck_require__(1556); -var CONNECTION_REUSE_ENV_NAME = 'AWS_NODEJS_CONNECTION_REUSE_ENABLED'; +/***/ 89169: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -/** - * @api private - */ -AWS.NodeHttpClient = AWS.util.inherit({ - handleRequest: function handleRequest(httpRequest, httpOptions, callback, errCallback) { - var self = this; - var endpoint = httpRequest.endpoint; - var pathPrefix = ''; - if (!httpOptions) httpOptions = {}; - if (httpOptions.proxy) { - pathPrefix = endpoint.protocol + '//' + endpoint.hostname; - if (endpoint.port !== 80 && endpoint.port !== 443) { - pathPrefix += ':' + endpoint.port; - } - endpoint = new AWS.Endpoint(httpOptions.proxy); - } +"use strict"; - var useSSL = endpoint.protocol === 'https:'; - var http = useSSL ? __nccwpck_require__(95687) : __nccwpck_require__(13685); - var options = { - host: endpoint.hostname, - port: endpoint.port, - method: httpRequest.method, - headers: httpRequest.headers, - path: pathPrefix + httpRequest.path +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveDefaultRuntimeConfig = exports.getDefaultClientConfiguration = void 0; +const checksum_1 = __nccwpck_require__(8947); +const getDefaultClientConfiguration = (runtimeConfig) => { + return { + ...(0, checksum_1.getChecksumConfiguration)(runtimeConfig), }; +}; +exports.getDefaultClientConfiguration = getDefaultClientConfiguration; +const resolveDefaultRuntimeConfig = (config) => { + return { + ...(0, checksum_1.resolveChecksumRuntimeConfig)(config), + }; +}; +exports.resolveDefaultRuntimeConfig = resolveDefaultRuntimeConfig; - if (!httpOptions.agent) { - options.agent = this.getAgent(useSSL, { - keepAlive: process.env[CONNECTION_REUSE_ENV_NAME] === '1' ? 
true : false - }); - } - - AWS.util.update(options, httpOptions); - delete options.proxy; // proxy isn't an HTTP option - delete options.timeout; // timeout isn't an HTTP option - var stream = http.request(options, function (httpResp) { - if (stream.didCallback) return; +/***/ }), - callback(httpResp); - httpResp.emit( - 'headers', - httpResp.statusCode, - httpResp.headers, - httpResp.statusMessage - ); - }); - httpRequest.stream = stream; // attach stream to httpRequest - stream.didCallback = false; +/***/ 47447: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - // connection timeout support - if (httpOptions.connectTimeout) { - var connectTimeoutId; - stream.on('socket', function(socket) { - if (socket.connecting) { - connectTimeoutId = setTimeout(function connectTimeout() { - if (stream.didCallback) return; stream.didCallback = true; - - stream.abort(); - errCallback(AWS.util.error( - new Error('Socket timed out without establishing a connection'), - {code: 'TimeoutError'} - )); - }, httpOptions.connectTimeout); - socket.on('connect', function() { - clearTimeout(connectTimeoutId); - connectTimeoutId = null; - }); - } - }); - } - - // timeout support - stream.setTimeout(httpOptions.timeout || 0, function() { - if (stream.didCallback) return; stream.didCallback = true; - - var msg = 'Connection timed out after ' + httpOptions.timeout + 'ms'; - errCallback(AWS.util.error(new Error(msg), {code: 'TimeoutError'})); - stream.abort(); - }); +"use strict"; - stream.on('error', function(err) { - if (connectTimeoutId) { - clearTimeout(connectTimeoutId); - connectTimeoutId = null; - } - if (stream.didCallback) return; stream.didCallback = true; - if ('ECONNRESET' === err.code || 'EPIPE' === err.code || 'ETIMEDOUT' === err.code) { - errCallback(AWS.util.error(err, {code: 'TimeoutError'})); - } else { - errCallback(err); - } - }); +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(89169), exports); - var expect = httpRequest.headers.Expect || httpRequest.headers.expect; - if (expect === '100-continue') { - stream.once('continue', function() { - self.writeBody(stream, httpRequest); - }); - } else { - this.writeBody(stream, httpRequest); - } - return stream; - }, +/***/ }), - writeBody: function writeBody(stream, httpRequest) { - var body = httpRequest.body; - var totalBytes = parseInt(httpRequest.headers['Content-Length'], 10); +/***/ 18883: +/***/ ((__unused_webpack_module, exports) => { - if (body instanceof Stream) { - // For progress support of streaming content - - // pipe the data through a transform stream to emit 'sendProgress' events - var progressStream = this.progressStream(stream, totalBytes); - if (progressStream) { - body.pipe(progressStream).pipe(stream); - } else { - body.pipe(stream); - } - } else if (body) { - // The provided body is a buffer/string and is already fully available in memory - - // For performance it's best to send it as a whole by calling stream.end(body), - // Callers expect a 'sendProgress' event which is best emitted once - // the http request stream has been fully written and all data flushed. - // The use of totalBytes is important over body.length for strings where - // length is char length and not byte length. 
- stream.once('finish', function() { - stream.emit('sendProgress', { - loaded: totalBytes, - total: totalBytes - }); - }); - stream.end(body); - } else { - // no request body - stream.end(); - } - }, +"use strict"; - /** - * Create the https.Agent or http.Agent according to the request schema. - */ - getAgent: function getAgent(useSSL, agentOptions) { - var http = useSSL ? __nccwpck_require__(95687) : __nccwpck_require__(13685); - if (useSSL) { - if (!AWS.NodeHttpClient.sslAgent) { - AWS.NodeHttpClient.sslAgent = new http.Agent(AWS.util.merge({ - rejectUnauthorized: process.env.NODE_TLS_REJECT_UNAUTHORIZED === '0' ? false : true - }, agentOptions || {})); - AWS.NodeHttpClient.sslAgent.setMaxListeners(0); - - // delegate maxSockets to globalAgent, set a default limit of 50 if current value is Infinity. - // Users can bypass this default by supplying their own Agent as part of SDK configuration. - Object.defineProperty(AWS.NodeHttpClient.sslAgent, 'maxSockets', { - enumerable: true, - get: function() { - var defaultMaxSockets = 50; - var globalAgent = http.globalAgent; - if (globalAgent && globalAgent.maxSockets !== Infinity && typeof globalAgent.maxSockets === 'number') { - return globalAgent.maxSockets; - } - return defaultMaxSockets; - } - }); - } - return AWS.NodeHttpClient.sslAgent; - } else { - if (!AWS.NodeHttpClient.agent) { - AWS.NodeHttpClient.agent = new http.Agent(agentOptions); - } - return AWS.NodeHttpClient.agent; - } - }, +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.FieldPosition = void 0; +var FieldPosition; +(function (FieldPosition) { + FieldPosition[FieldPosition["HEADER"] = 0] = "HEADER"; + FieldPosition[FieldPosition["TRAILER"] = 1] = "TRAILER"; +})(FieldPosition = exports.FieldPosition || (exports.FieldPosition = {})); - progressStream: function progressStream(stream, totalBytes) { - if (typeof TransformStream === 'undefined') { - // for node 0.8 there is no streaming progress - return; - } - var loadedBytes = 0; - var reporter = new TransformStream(); - reporter._transform = function(chunk, encoding, callback) { - if (chunk) { - loadedBytes += chunk.length; - stream.emit('sendProgress', { - loaded: loadedBytes, - total: totalBytes - }); - } - callback(null, chunk); - }; - return reporter; - }, - emitter: null -}); +/***/ }), -/** - * @!ignore - */ +/***/ 7545: +/***/ ((__unused_webpack_module, exports) => { -/** - * @api private - */ -AWS.HttpClient.prototype = AWS.NodeHttpClient.prototype; +"use strict"; -/** - * @api private - */ -AWS.HttpClient.streamsApiVersion = ReadableStream ? 
2 : 1; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 47495: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var util = __nccwpck_require__(77985); - -function JsonBuilder() { } - -JsonBuilder.prototype.build = function(value, shape) { - return JSON.stringify(translate(value, shape)); -}; +/***/ 49123: +/***/ ((__unused_webpack_module, exports) => { -function translate(value, shape) { - if (!shape || value === undefined || value === null) return undefined; +"use strict"; - switch (shape.type) { - case 'structure': return translateStructure(value, shape); - case 'map': return translateMap(value, shape); - case 'list': return translateList(value, shape); - default: return translateScalar(value, shape); - } -} +Object.defineProperty(exports, "__esModule", ({ value: true })); -function translateStructure(structure, shape) { - if (shape.isDocument) { - return structure; - } - var struct = {}; - util.each(structure, function(name, value) { - var memberShape = shape.members[name]; - if (memberShape) { - if (memberShape.location !== 'body') return; - var locationName = memberShape.isLocationName ? memberShape.name : name; - var result = translate(value, memberShape); - if (result !== undefined) struct[locationName] = result; - } - }); - return struct; -} -function translateList(list, shape) { - var out = []; - util.arrayEach(list, function(value) { - var result = translate(value, shape.member); - if (result !== undefined) out.push(result); - }); - return out; -} +/***/ }), -function translateMap(map, shape) { - var out = {}; - util.each(map, function(key, value) { - var result = translate(value, shape.value); - if (result !== undefined) out[key] = result; - }); - return out; -} +/***/ 28006: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -function translateScalar(value, shape) { - return shape.toWireFormat(value); -} +"use strict"; -/** - * @api private - */ -module.exports = JsonBuilder; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(7545), exports); +tslib_1.__exportStar(__nccwpck_require__(49123), exports); /***/ }), -/***/ 5474: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 55756: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -var util = __nccwpck_require__(77985); +"use strict"; -function JsonParser() { } +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(74075), exports); +tslib_1.__exportStar(__nccwpck_require__(48960), exports); +tslib_1.__exportStar(__nccwpck_require__(63274), exports); +tslib_1.__exportStar(__nccwpck_require__(78340), exports); +tslib_1.__exportStar(__nccwpck_require__(4744), exports); +tslib_1.__exportStar(__nccwpck_require__(68270), exports); +tslib_1.__exportStar(__nccwpck_require__(57628), exports); +tslib_1.__exportStar(__nccwpck_require__(89035), exports); +tslib_1.__exportStar(__nccwpck_require__(7225), exports); +tslib_1.__exportStar(__nccwpck_require__(54126), exports); +tslib_1.__exportStar(__nccwpck_require__(21550), exports); +tslib_1.__exportStar(__nccwpck_require__(88508), exports); +tslib_1.__exportStar(__nccwpck_require__(47447), exports); +tslib_1.__exportStar(__nccwpck_require__(18883), exports); +tslib_1.__exportStar(__nccwpck_require__(28006), exports); +tslib_1.__exportStar(__nccwpck_require__(52866), exports); 
+tslib_1.__exportStar(__nccwpck_require__(17756), exports); +tslib_1.__exportStar(__nccwpck_require__(45489), exports); +tslib_1.__exportStar(__nccwpck_require__(26524), exports); +tslib_1.__exportStar(__nccwpck_require__(14603), exports); +tslib_1.__exportStar(__nccwpck_require__(83752), exports); +tslib_1.__exportStar(__nccwpck_require__(30774), exports); +tslib_1.__exportStar(__nccwpck_require__(14089), exports); +tslib_1.__exportStar(__nccwpck_require__(45678), exports); +tslib_1.__exportStar(__nccwpck_require__(69926), exports); +tslib_1.__exportStar(__nccwpck_require__(9945), exports); +tslib_1.__exportStar(__nccwpck_require__(28564), exports); +tslib_1.__exportStar(__nccwpck_require__(61285), exports); +tslib_1.__exportStar(__nccwpck_require__(50364), exports); +tslib_1.__exportStar(__nccwpck_require__(69304), exports); +tslib_1.__exportStar(__nccwpck_require__(10375), exports); +tslib_1.__exportStar(__nccwpck_require__(66894), exports); +tslib_1.__exportStar(__nccwpck_require__(57887), exports); +tslib_1.__exportStar(__nccwpck_require__(66255), exports); + + +/***/ }), + +/***/ 52866: +/***/ ((__unused_webpack_module, exports) => { -JsonParser.prototype.parse = function(value, shape) { - return translate(JSON.parse(value), shape); -}; +"use strict"; -function translate(value, shape) { - if (!shape || value === undefined) return undefined; +Object.defineProperty(exports, "__esModule", ({ value: true })); - switch (shape.type) { - case 'structure': return translateStructure(value, shape); - case 'map': return translateMap(value, shape); - case 'list': return translateList(value, shape); - default: return translateScalar(value, shape); - } -} -function translateStructure(structure, shape) { - if (structure == null) return undefined; - if (shape.isDocument) return structure; +/***/ }), - var struct = {}; - var shapeMembers = shape.members; - util.each(shapeMembers, function(name, memberShape) { - var locationName = memberShape.isLocationName ? 
memberShape.name : name; - if (Object.prototype.hasOwnProperty.call(structure, locationName)) { - var value = structure[locationName]; - var result = translate(value, memberShape); - if (result !== undefined) struct[name] = result; - } - }); - return struct; -} +/***/ 17756: +/***/ ((__unused_webpack_module, exports) => { -function translateList(list, shape) { - if (list == null) return undefined; +"use strict"; - var out = []; - util.arrayEach(list, function(value) { - var result = translate(value, shape.member); - if (result === undefined) out.push(null); - else out.push(result); - }); - return out; -} +Object.defineProperty(exports, "__esModule", ({ value: true })); -function translateMap(map, shape) { - if (map == null) return undefined; - var out = {}; - util.each(map, function(key, value) { - var result = translate(value, shape.value); - if (result === undefined) out[key] = null; - else out[key] = result; - }); - return out; -} +/***/ }), -function translateScalar(value, shape) { - return shape.toType(value); -} +/***/ 45489: +/***/ ((__unused_webpack_module, exports) => { -/** - * @api private - */ -module.exports = JsonParser; +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 93985: -/***/ ((module) => { +/***/ 26524: +/***/ ((__unused_webpack_module, exports) => { -var warning = [ - 'We are formalizing our plans to enter AWS SDK for JavaScript (v2) into maintenance mode in 2023.\n', - 'Please migrate your code to use AWS SDK for JavaScript (v3).', - 'For more information, check the migration guide at https://a.co/7PzMCcy' -].join('\n'); +"use strict"; -module.exports = { - suppress: false -}; +Object.defineProperty(exports, "__esModule", ({ value: true })); -/** - * To suppress this message: - * @example - * require('aws-sdk/lib/maintenance_mode_message').suppress = true; - */ -function emitWarning() { - if (typeof process === 'undefined') - return; - // Skip maintenance mode message in Lambda environments - if ( - typeof process.env === 'object' && - typeof process.env.AWS_EXECUTION_ENV !== 'undefined' && - process.env.AWS_EXECUTION_ENV.indexOf('AWS_Lambda_') === 0 - ) { - return; - } +/***/ }), - if ( - typeof process.env === 'object' && - typeof process.env.AWS_SDK_JS_SUPPRESS_MAINTENANCE_MODE_MESSAGE !== 'undefined' - ) { - return; - } +/***/ 14603: +/***/ ((__unused_webpack_module, exports) => { - if (typeof process.emitWarning === 'function') { - process.emitWarning(warning, { - type: 'NOTE' - }); - } -} +"use strict"; -setTimeout(function () { - if (!module.exports.suppress) { - emitWarning(); - } -}, 0); +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 25768: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 83752: +/***/ ((__unused_webpack_module, exports) => { -var AWS = __nccwpck_require__(28437); -__nccwpck_require__(1556); -var inherit = AWS.util.inherit; -var getMetadataServiceEndpoint = __nccwpck_require__(608); -var URL = (__nccwpck_require__(57310).URL); +"use strict"; -/** - * Represents a metadata service available on EC2 instances. Using the - * {request} method, you can receieve metadata about any available resource - * on the metadata service. - * - * You can disable the use of the IMDS by setting the AWS_EC2_METADATA_DISABLED - * environment variable to a truthy value. 
- * - * @!attribute [r] httpOptions - * @return [map] a map of options to pass to the underlying HTTP request: - * - * * **timeout** (Number) — a timeout value in milliseconds to wait - * before aborting the connection. Set to 0 for no timeout. - * - * @!macro nobrowser - */ -AWS.MetadataService = inherit({ - /** - * @return [String] the endpoint of the instance metadata service - */ - endpoint: getMetadataServiceEndpoint(), - - /** - * @!ignore - */ - - /** - * Default HTTP options. By default, the metadata service is set to not - * timeout on long requests. This means that on non-EC2 machines, this - * request will never return. If you are calling this operation from an - * environment that may not always run on EC2, set a `timeout` value so - * the SDK will abort the request after a given number of milliseconds. - */ - httpOptions: { timeout: 0 }, - - /** - * when enabled, metadata service will not fetch token - */ - disableFetchToken: false, - - /** - * Creates a new MetadataService object with a given set of options. - * - * @option options host [String] the hostname of the instance metadata - * service - * @option options httpOptions [map] a map of options to pass to the - * underlying HTTP request: - * - * * **timeout** (Number) — a timeout value in milliseconds to wait - * before aborting the connection. Set to 0 for no timeout. - * @option options maxRetries [Integer] the maximum number of retries to - * perform for timeout errors - * @option options retryDelayOptions [map] A set of options to configure the - * retry delay on retryable errors. See AWS.Config for details. - */ - constructor: function MetadataService(options) { - if (options && options.host) { - options.endpoint = 'http://' + options.host; - delete options.host; - } - AWS.util.update(this, options); - }, +Object.defineProperty(exports, "__esModule", ({ value: true })); - /** - * Sends a request to the instance metadata service for a given resource. - * - * @param path [String] the path of the resource to get - * - * @param options [map] an optional map used to make request - * - * * **method** (String) — HTTP request method - * - * * **headers** (map) — a map of response header keys and their respective values - * - * @callback callback function(err, data) - * Called when a response is available from the service. 
- * @param err [Error, null] if an error occurred, this value will be set - * @param data [String, null] if the request was successful, the body of - * the response - */ - request: function request(path, options, callback) { - if (arguments.length === 2) { - callback = options; - options = {}; - } - - if (process.env[AWS.util.imdsDisabledEnv]) { - callback(new Error('EC2 Instance Metadata Service access disabled')); - return; - } - path = path || '/'; +/***/ }), - // Verify that host is a valid URL - if (URL) { new URL(this.endpoint); } +/***/ 30774: +/***/ ((__unused_webpack_module, exports) => { - var httpRequest = new AWS.HttpRequest(this.endpoint + path); - httpRequest.method = options.method || 'GET'; - if (options.headers) { - httpRequest.headers = options.headers; - } - AWS.util.handleRequestWithRetries(httpRequest, this, callback); - }, +"use strict"; - /** - * @api private - */ - loadCredentialsCallbacks: [], - - /** - * Fetches metadata token used for getting credentials - * - * @api private - * @callback callback function(err, token) - * Called when token is loaded from the resource - */ - fetchMetadataToken: function fetchMetadataToken(callback) { - var self = this; - var tokenFetchPath = '/latest/api/token'; - self.request( - tokenFetchPath, - { - 'method': 'PUT', - 'headers': { - 'x-aws-ec2-metadata-token-ttl-seconds': '21600' - } - }, - callback - ); - }, +Object.defineProperty(exports, "__esModule", ({ value: true })); - /** - * Fetches credentials - * - * @api private - * @callback cb function(err, creds) - * Called when credentials are loaded from the resource - */ - fetchCredentials: function fetchCredentials(options, cb) { - var self = this; - var basePath = '/latest/meta-data/iam/security-credentials/'; - - self.request(basePath, options, function (err, roleName) { - if (err) { - self.disableFetchToken = !(err.statusCode === 401); - cb(AWS.util.error( - err, - { - message: 'EC2 Metadata roleName request returned error' - } - )); - return; - } - roleName = roleName.split('\n')[0]; // grab first (and only) role - self.request(basePath + roleName, options, function (credErr, credData) { - if (credErr) { - self.disableFetchToken = !(credErr.statusCode === 401); - cb(AWS.util.error( - credErr, - { - message: 'EC2 Metadata creds request returned error' - } - )); - return; - } - try { - var credentials = JSON.parse(credData); - cb(null, credentials); - } catch (parseError) { - cb(parseError); - } - }); - }); - }, - /** - * Loads a set of credentials stored in the instance metadata service - * - * @api private - * @callback callback function(err, credentials) - * Called when credentials are loaded from the resource - * @param err [Error] if an error occurred, this value will be set - * @param credentials [Object] the raw JSON object containing all - * metadata from the credentials resource - */ - loadCredentials: function loadCredentials(callback) { - var self = this; - self.loadCredentialsCallbacks.push(callback); - if (self.loadCredentialsCallbacks.length > 1) { return; } - - function callbacks(err, creds) { - var cb; - while ((cb = self.loadCredentialsCallbacks.shift()) !== undefined) { - cb(err, creds); - } - } +/***/ }), - if (self.disableFetchToken) { - self.fetchCredentials({}, callbacks); - } else { - self.fetchMetadataToken(function(tokenError, token) { - if (tokenError) { - if (tokenError.code === 'TimeoutError') { - self.disableFetchToken = true; - } else if (tokenError.retryable === true) { - callbacks(AWS.util.error( - tokenError, - { - message: 'EC2 Metadata 
token request returned error' - } - )); - return; - } else if (tokenError.statusCode === 400) { - callbacks(AWS.util.error( - tokenError, - { - message: 'EC2 Metadata token request returned 400' - } - )); - return; - } - } - var options = {}; - if (token) { - options.headers = { - 'x-aws-ec2-metadata-token': token - }; - } - self.fetchCredentials(options, callbacks); - }); +/***/ 14089: +/***/ ((__unused_webpack_module, exports) => { - } - } -}); +"use strict"; -/** - * @api private - */ -module.exports = AWS.MetadataService; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 83205: -/***/ ((module) => { +/***/ 45678: +/***/ ((__unused_webpack_module, exports) => { -var getEndpoint = function() { - return { - IPv4: 'http://169.254.169.254', - IPv6: 'http://[fd00:ec2::254]', - }; -}; +"use strict"; -module.exports = getEndpoint; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 95578: -/***/ ((module) => { - -var ENV_ENDPOINT_NAME = 'AWS_EC2_METADATA_SERVICE_ENDPOINT'; -var CONFIG_ENDPOINT_NAME = 'ec2_metadata_service_endpoint'; +/***/ 69926: +/***/ ((__unused_webpack_module, exports) => { -var getEndpointConfigOptions = function() { - return { - environmentVariableSelector: function(env) { return env[ENV_ENDPOINT_NAME]; }, - configFileSelector: function(profile) { return profile[CONFIG_ENDPOINT_NAME]; }, - default: undefined, - }; -}; +"use strict"; -module.exports = getEndpointConfigOptions; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 37997: -/***/ ((module) => { +/***/ 9945: +/***/ ((__unused_webpack_module, exports) => { -var getEndpointMode = function() { - return { - IPv4: 'IPv4', - IPv6: 'IPv6', - }; -}; +"use strict"; -module.exports = getEndpointMode; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 45509: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var EndpointMode = __nccwpck_require__(37997)(); - -var ENV_ENDPOINT_MODE_NAME = 'AWS_EC2_METADATA_SERVICE_ENDPOINT_MODE'; -var CONFIG_ENDPOINT_MODE_NAME = 'ec2_metadata_service_endpoint_mode'; +/***/ 28564: +/***/ ((__unused_webpack_module, exports) => { -var getEndpointModeConfigOptions = function() { - return { - environmentVariableSelector: function(env) { return env[ENV_ENDPOINT_MODE_NAME]; }, - configFileSelector: function(profile) { return profile[CONFIG_ENDPOINT_MODE_NAME]; }, - default: EndpointMode.IPv4, - }; -}; +"use strict"; -module.exports = getEndpointModeConfigOptions; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 608: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 61285: +/***/ ((__unused_webpack_module, exports) => { -var AWS = __nccwpck_require__(28437); +"use strict"; -var Endpoint = __nccwpck_require__(83205)(); -var EndpointMode = __nccwpck_require__(37997)(); +Object.defineProperty(exports, "__esModule", ({ value: true })); -var ENDPOINT_CONFIG_OPTIONS = __nccwpck_require__(95578)(); -var ENDPOINT_MODE_CONFIG_OPTIONS = __nccwpck_require__(45509)(); -var getMetadataServiceEndpoint = function() { - var endpoint = AWS.util.loadConfig(ENDPOINT_CONFIG_OPTIONS); - if (endpoint !== undefined) return endpoint; +/***/ }), - var endpointMode = AWS.util.loadConfig(ENDPOINT_MODE_CONFIG_OPTIONS); - switch (endpointMode) { - case EndpointMode.IPv4: - return Endpoint.IPv4; - case EndpointMode.IPv6: - return Endpoint.IPv6; - default: - throw new Error('Unsupported endpoint mode: ' + endpointMode); - } 
-}; +/***/ 50364: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; -module.exports = getMetadataServiceEndpoint; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.RequestHandlerProtocol = void 0; +var RequestHandlerProtocol; +(function (RequestHandlerProtocol) { + RequestHandlerProtocol["HTTP_0_9"] = "http/0.9"; + RequestHandlerProtocol["HTTP_1_0"] = "http/1.0"; + RequestHandlerProtocol["TDS_8_0"] = "tds/8.0"; +})(RequestHandlerProtocol = exports.RequestHandlerProtocol || (exports.RequestHandlerProtocol = {})); /***/ }), -/***/ 17657: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 69304: +/***/ ((__unused_webpack_module, exports) => { -var Collection = __nccwpck_require__(71965); -var Operation = __nccwpck_require__(28083); -var Shape = __nccwpck_require__(71349); -var Paginator = __nccwpck_require__(45938); -var ResourceWaiter = __nccwpck_require__(41368); -var metadata = __nccwpck_require__(17752); +"use strict"; -var util = __nccwpck_require__(77985); -var property = util.property; -var memoizedProperty = util.memoizedProperty; +Object.defineProperty(exports, "__esModule", ({ value: true })); -function Api(api, options) { - var self = this; - api = api || {}; - options = options || {}; - options.api = this; - - api.metadata = api.metadata || {}; - - var serviceIdentifier = options.serviceIdentifier; - delete options.serviceIdentifier; - - property(this, 'isApi', true, false); - property(this, 'apiVersion', api.metadata.apiVersion); - property(this, 'endpointPrefix', api.metadata.endpointPrefix); - property(this, 'signingName', api.metadata.signingName); - property(this, 'globalEndpoint', api.metadata.globalEndpoint); - property(this, 'signatureVersion', api.metadata.signatureVersion); - property(this, 'jsonVersion', api.metadata.jsonVersion); - property(this, 'targetPrefix', api.metadata.targetPrefix); - property(this, 'protocol', api.metadata.protocol); - property(this, 'timestampFormat', api.metadata.timestampFormat); - property(this, 'xmlNamespaceUri', api.metadata.xmlNamespace); - property(this, 'abbreviation', api.metadata.serviceAbbreviation); - property(this, 'fullName', api.metadata.serviceFullName); - property(this, 'serviceId', api.metadata.serviceId); - if (serviceIdentifier && metadata[serviceIdentifier]) { - property(this, 'xmlNoDefaultLists', metadata[serviceIdentifier].xmlNoDefaultLists, false); - } - memoizedProperty(this, 'className', function() { - var name = api.metadata.serviceAbbreviation || api.metadata.serviceFullName; - if (!name) return null; +/***/ }), - name = name.replace(/^Amazon|AWS\s*|\(.*|\s+|\W+/g, ''); - if (name === 'ElasticLoadBalancing') name = 'ELB'; - return name; - }); +/***/ 10375: +/***/ ((__unused_webpack_module, exports) => { - function addEndpointOperation(name, operation) { - if (operation.endpointoperation === true) { - property(self, 'endpointOperation', util.string.lowerFirst(name)); - } - if (operation.endpointdiscovery && !self.hasRequiredEndpointDiscovery) { - property( - self, - 'hasRequiredEndpointDiscovery', - operation.endpointdiscovery.required === true - ); - } - } +"use strict"; - property(this, 'operations', new Collection(api.operations, options, function(name, operation) { - return new Operation(name, operation, options); - }, util.string.lowerFirst, addEndpointOperation)); +Object.defineProperty(exports, "__esModule", ({ value: true })); - property(this, 'shapes', new Collection(api.shapes, options, function(name, shape) { - return Shape.create(shape, 
options); - })); - property(this, 'paginators', new Collection(api.paginators, options, function(name, paginator) { - return new Paginator(name, paginator, options); - })); +/***/ }), - property(this, 'waiters', new Collection(api.waiters, options, function(name, waiter) { - return new ResourceWaiter(name, waiter, options); - }, util.string.lowerFirst)); +/***/ 66894: +/***/ ((__unused_webpack_module, exports) => { - if (options.documentation) { - property(this, 'documentation', api.documentation); - property(this, 'documentationUrl', api.documentationUrl); - } - property(this, 'awsQueryCompatible', api.metadata.awsQueryCompatible); -} +"use strict"; -/** - * @api private - */ -module.exports = Api; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 71965: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var memoizedProperty = (__nccwpck_require__(77985).memoizedProperty); - -function memoize(name, value, factory, nameTr) { - memoizedProperty(this, nameTr(name), function() { - return factory(name, value); - }); -} - -function Collection(iterable, options, factory, nameTr, callback) { - nameTr = nameTr || String; - var self = this; +/***/ 57887: +/***/ ((__unused_webpack_module, exports) => { - for (var id in iterable) { - if (Object.prototype.hasOwnProperty.call(iterable, id)) { - memoize.call(self, id, iterable[id], factory, nameTr); - if (callback) callback(id, iterable[id]); - } - } -} +"use strict"; -/** - * @api private - */ -module.exports = Collection; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 28083: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 66255: +/***/ ((__unused_webpack_module, exports) => { -var Shape = __nccwpck_require__(71349); +"use strict"; -var util = __nccwpck_require__(77985); -var property = util.property; -var memoizedProperty = util.memoizedProperty; +Object.defineProperty(exports, "__esModule", ({ value: true })); -function Operation(name, operation, options) { - var self = this; - options = options || {}; - property(this, 'name', operation.name || name); - property(this, 'api', options.api, false); - - operation.http = operation.http || {}; - property(this, 'endpoint', operation.endpoint); - property(this, 'httpMethod', operation.http.method || 'POST'); - property(this, 'httpPath', operation.http.requestUri || '/'); - property(this, 'authtype', operation.authtype || ''); - property( - this, - 'endpointDiscoveryRequired', - operation.endpointdiscovery ? - (operation.endpointdiscovery.required ? 'REQUIRED' : 'OPTIONAL') : - 'NULL' - ); +/***/ }), - // httpChecksum replaces usage of httpChecksumRequired, but some APIs - // (s3control) still uses old trait. 
- var httpChecksumRequired = operation.httpChecksumRequired - || (operation.httpChecksum && operation.httpChecksum.requestChecksumRequired); - property(this, 'httpChecksumRequired', httpChecksumRequired, false); +/***/ 14681: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - memoizedProperty(this, 'input', function() { - if (!operation.input) { - return new Shape.create({type: 'structure'}, options); - } - return Shape.create(operation.input, options); - }); +"use strict"; - memoizedProperty(this, 'output', function() { - if (!operation.output) { - return new Shape.create({type: 'structure'}, options); +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.parseUrl = void 0; +const querystring_parser_1 = __nccwpck_require__(4769); +const parseUrl = (url) => { + if (typeof url === "string") { + return (0, exports.parseUrl)(new URL(url)); } - return Shape.create(operation.output, options); - }); - - memoizedProperty(this, 'errors', function() { - var list = []; - if (!operation.errors) return null; - - for (var i = 0; i < operation.errors.length; i++) { - list.push(Shape.create(operation.errors[i], options)); + const { hostname, pathname, port, protocol, search } = url; + let query; + if (search) { + query = (0, querystring_parser_1.parseQueryString)(search); } + return { + hostname, + port: port ? parseInt(port) : undefined, + protocol, + path: pathname, + query, + }; +}; +exports.parseUrl = parseUrl; - return list; - }); - memoizedProperty(this, 'paginator', function() { - return options.api.paginators[name]; - }); +/***/ }), - if (options.documentation) { - property(this, 'documentation', operation.documentation); - property(this, 'documentationUrl', operation.documentationUrl); - } +/***/ 30305: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - // idempotentMembers only tracks top-level input shapes - memoizedProperty(this, 'idempotentMembers', function() { - var idempotentMembers = []; - var input = self.input; - var members = input.members; - if (!input.members) { - return idempotentMembers; - } - for (var name in members) { - if (!members.hasOwnProperty(name)) { - continue; - } - if (members[name].isIdempotent === true) { - idempotentMembers.push(name); - } - } - return idempotentMembers; - }); +"use strict"; - memoizedProperty(this, 'hasEventOutput', function() { - var output = self.output; - return hasEventStream(output); - }); -} +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.fromBase64 = void 0; +const util_buffer_from_1 = __nccwpck_require__(31381); +const BASE64_REGEX = /^[A-Za-z0-9+/]*={0,2}$/; +const fromBase64 = (input) => { + if ((input.length * 3) % 4 !== 0) { + throw new TypeError(`Incorrect padding on base64 string.`); + } + if (!BASE64_REGEX.exec(input)) { + throw new TypeError(`Invalid base64 string.`); + } + const buffer = (0, util_buffer_from_1.fromString)(input, "base64"); + return new Uint8Array(buffer.buffer, buffer.byteOffset, buffer.byteLength); +}; +exports.fromBase64 = fromBase64; -function hasEventStream(topLevelShape) { - var members = topLevelShape.members; - var payload = topLevelShape.payload; - if (!topLevelShape.members) { - return false; - } +/***/ }), - if (payload) { - var payloadMember = members[payload]; - return payloadMember.isEventStream; - } +/***/ 75600: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - // check if any member is an event stream - for (var name in members) { - if (!members.hasOwnProperty(name)) { - if 
(members[name].isEventStream === true) { - return true; - } - } - } - return false; -} +"use strict"; -/** - * @api private - */ -module.exports = Operation; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(30305), exports); +tslib_1.__exportStar(__nccwpck_require__(74730), exports); /***/ }), -/***/ 45938: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var property = (__nccwpck_require__(77985).property); +/***/ 74730: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -function Paginator(name, paginator) { - property(this, 'inputToken', paginator.input_token); - property(this, 'limitKey', paginator.limit_key); - property(this, 'moreResults', paginator.more_results); - property(this, 'outputToken', paginator.output_token); - property(this, 'resultKey', paginator.result_key); -} +"use strict"; -/** - * @api private - */ -module.exports = Paginator; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.toBase64 = void 0; +const util_buffer_from_1 = __nccwpck_require__(31381); +const toBase64 = (input) => (0, util_buffer_from_1.fromArrayBuffer)(input.buffer, input.byteOffset, input.byteLength).toString("base64"); +exports.toBase64 = toBase64; /***/ }), -/***/ 41368: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var util = __nccwpck_require__(77985); -var property = util.property; - -function ResourceWaiter(name, waiter, options) { - options = options || {}; - property(this, 'name', name); - property(this, 'api', options.api, false); +/***/ 54880: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - if (waiter.operation) { - property(this, 'operation', util.string.lowerFirst(waiter.operation)); - } +"use strict"; - var self = this; - var keys = [ - 'type', - 'description', - 'delay', - 'maxAttempts', - 'acceptors' - ]; - - keys.forEach(function(key) { - var value = waiter[key]; - if (value) { - property(self, key, value); +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.calculateBodyLength = void 0; +const fs_1 = __nccwpck_require__(57147); +const calculateBodyLength = (body) => { + if (!body) { + return 0; } - }); -} - -/** - * @api private - */ -module.exports = ResourceWaiter; + if (typeof body === "string") { + return Buffer.from(body).length; + } + else if (typeof body.byteLength === "number") { + return body.byteLength; + } + else if (typeof body.size === "number") { + return body.size; + } + else if (typeof body.path === "string" || Buffer.isBuffer(body.path)) { + return (0, fs_1.lstatSync)(body.path).size; + } + else if (typeof body.fd === "number") { + return (0, fs_1.fstatSync)(body.fd).size; + } + throw new Error(`Body Length computation failed for ${body}`); +}; +exports.calculateBodyLength = calculateBodyLength; /***/ }), -/***/ 71349: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var Collection = __nccwpck_require__(71965); - -var util = __nccwpck_require__(77985); +/***/ 68075: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -function property(obj, name, value) { - if (value !== null && value !== undefined) { - util.property.apply(this, arguments); - } -} +"use strict"; -function memoizedProperty(obj, name) { - if (!obj.constructor.prototype[name]) { - util.memoizedProperty.apply(this, arguments); - } -} +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = 
__nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(54880), exports); -function Shape(shape, options, memberName) { - options = options || {}; - property(this, 'shape', shape.shape); - property(this, 'api', options.api, false); - property(this, 'type', shape.type); - property(this, 'enum', shape.enum); - property(this, 'min', shape.min); - property(this, 'max', shape.max); - property(this, 'pattern', shape.pattern); - property(this, 'location', shape.location || this.location || 'body'); - property(this, 'name', this.name || shape.xmlName || shape.queryName || - shape.locationName || memberName); - property(this, 'isStreaming', shape.streaming || this.isStreaming || false); - property(this, 'requiresLength', shape.requiresLength, false); - property(this, 'isComposite', shape.isComposite || false); - property(this, 'isShape', true, false); - property(this, 'isQueryName', Boolean(shape.queryName), false); - property(this, 'isLocationName', Boolean(shape.locationName), false); - property(this, 'isIdempotent', shape.idempotencyToken === true); - property(this, 'isJsonValue', shape.jsonvalue === true); - property(this, 'isSensitive', shape.sensitive === true || shape.prototype && shape.prototype.sensitive === true); - property(this, 'isEventStream', Boolean(shape.eventstream), false); - property(this, 'isEvent', Boolean(shape.event), false); - property(this, 'isEventPayload', Boolean(shape.eventpayload), false); - property(this, 'isEventHeader', Boolean(shape.eventheader), false); - property(this, 'isTimestampFormatSet', Boolean(shape.timestampFormat) || shape.prototype && shape.prototype.isTimestampFormatSet === true, false); - property(this, 'endpointDiscoveryId', Boolean(shape.endpointdiscoveryid), false); - property(this, 'hostLabel', Boolean(shape.hostLabel), false); - - if (options.documentation) { - property(this, 'documentation', shape.documentation); - property(this, 'documentationUrl', shape.documentationUrl); - } +/***/ }), - if (shape.xmlAttribute) { - property(this, 'isXmlAttribute', shape.xmlAttribute || false); - } +/***/ 31381: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - // type conversion and parsing - property(this, 'defaultValue', null); - this.toWireFormat = function(value) { - if (value === null || value === undefined) return ''; - return value; - }; - this.toType = function(value) { return value; }; -} +"use strict"; -/** - * @api private - */ -Shape.normalizedTypes = { - character: 'string', - double: 'float', - long: 'integer', - short: 'integer', - biginteger: 'integer', - bigdecimal: 'float', - blob: 'binary' +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.fromString = exports.fromArrayBuffer = void 0; +const is_array_buffer_1 = __nccwpck_require__(10780); +const buffer_1 = __nccwpck_require__(14300); +const fromArrayBuffer = (input, offset = 0, length = input.byteLength - offset) => { + if (!(0, is_array_buffer_1.isArrayBuffer)(input)) { + throw new TypeError(`The "input" argument must be ArrayBuffer. 
Received type ${typeof input} (${input})`); + } + return buffer_1.Buffer.from(input, offset, length); }; - -/** - * @api private - */ -Shape.types = { - 'structure': StructureShape, - 'list': ListShape, - 'map': MapShape, - 'boolean': BooleanShape, - 'timestamp': TimestampShape, - 'float': FloatShape, - 'integer': IntegerShape, - 'string': StringShape, - 'base64': Base64Shape, - 'binary': BinaryShape -}; - -Shape.resolve = function resolve(shape, options) { - if (shape.shape) { - var refShape = options.api.shapes[shape.shape]; - if (!refShape) { - throw new Error('Cannot find shape reference: ' + shape.shape); - } - - return refShape; - } else { - return null; - } +exports.fromArrayBuffer = fromArrayBuffer; +const fromString = (input, encoding) => { + if (typeof input !== "string") { + throw new TypeError(`The "input" argument must be of type string. Received type ${typeof input} (${input})`); + } + return encoding ? buffer_1.Buffer.from(input, encoding) : buffer_1.Buffer.from(input); }; +exports.fromString = fromString; -Shape.create = function create(shape, options, memberName) { - if (shape.isShape) return shape; - var refShape = Shape.resolve(shape, options); - if (refShape) { - var filteredKeys = Object.keys(shape); - if (!options.documentation) { - filteredKeys = filteredKeys.filter(function(name) { - return !name.match(/documentation/); - }); - } +/***/ }), - // create an inline shape with extra members - var InlineShape = function() { - refShape.constructor.call(this, shape, options, memberName); - }; - InlineShape.prototype = refShape; - return new InlineShape(); - } else { - // set type if not set - if (!shape.type) { - if (shape.members) shape.type = 'structure'; - else if (shape.member) shape.type = 'list'; - else if (shape.key) shape.type = 'map'; - else shape.type = 'string'; - } +/***/ 42491: +/***/ ((__unused_webpack_module, exports) => { - // normalize types - var origType = shape.type; - if (Shape.normalizedTypes[shape.type]) { - shape.type = Shape.normalizedTypes[shape.type]; - } +"use strict"; - if (Shape.types[shape.type]) { - return new Shape.types[shape.type](shape, options, memberName); - } else { - throw new Error('Unrecognized shape type: ' + origType); - } - } +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.booleanSelector = exports.SelectorType = void 0; +var SelectorType; +(function (SelectorType) { + SelectorType["ENV"] = "env"; + SelectorType["CONFIG"] = "shared config entry"; +})(SelectorType = exports.SelectorType || (exports.SelectorType = {})); +const booleanSelector = (obj, key, type) => { + if (!(key in obj)) + return undefined; + if (obj[key] === "true") + return true; + if (obj[key] === "false") + return false; + throw new Error(`Cannot load ${type} "${key}". 
Expected "true" or "false", got ${obj[key]}.`); }; +exports.booleanSelector = booleanSelector; -function CompositeShape(shape) { - Shape.apply(this, arguments); - property(this, 'isComposite', true); - if (shape.flattened) { - property(this, 'flattened', shape.flattened || false); - } -} +/***/ }), -function StructureShape(shape, options) { - var self = this; - var requiredMap = null, firstInit = !this.isShape; +/***/ 83375: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - CompositeShape.apply(this, arguments); +"use strict"; - if (firstInit) { - property(this, 'defaultValue', function() { return {}; }); - property(this, 'members', {}); - property(this, 'memberNames', []); - property(this, 'required', []); - property(this, 'isRequired', function() { return false; }); - property(this, 'isDocument', Boolean(shape.document)); - } +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(42491), exports); - if (shape.members) { - property(this, 'members', new Collection(shape.members, options, function(name, member) { - return Shape.create(member, options, name); - })); - memoizedProperty(this, 'memberNames', function() { - return shape.xmlOrder || Object.keys(shape.members); - }); - if (shape.event) { - memoizedProperty(this, 'eventPayloadMemberName', function() { - var members = self.members; - var memberNames = self.memberNames; - // iterate over members to find ones that are event payloads - for (var i = 0, iLen = memberNames.length; i < iLen; i++) { - if (members[memberNames[i]].isEventPayload) { - return memberNames[i]; - } - } - }); +/***/ }), - memoizedProperty(this, 'eventHeaderMemberNames', function() { - var members = self.members; - var memberNames = self.memberNames; - var eventHeaderMemberNames = []; - // iterate over members to find ones that are event headers - for (var i = 0, iLen = memberNames.length; i < iLen; i++) { - if (members[memberNames[i]].isEventHeader) { - eventHeaderMemberNames.push(memberNames[i]); - } - } - return eventHeaderMemberNames; - }); - } - } +/***/ 56470: +/***/ ((__unused_webpack_module, exports) => { - if (shape.required) { - property(this, 'required', shape.required); - property(this, 'isRequired', function(name) { - if (!requiredMap) { - requiredMap = {}; - for (var i = 0; i < shape.required.length; i++) { - requiredMap[shape.required[i]] = true; - } - } +"use strict"; - return requiredMap[name]; - }, false, true); - } +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.IMDS_REGION_PATH = exports.DEFAULTS_MODE_OPTIONS = exports.ENV_IMDS_DISABLED = exports.AWS_DEFAULT_REGION_ENV = exports.AWS_REGION_ENV = exports.AWS_EXECUTION_ENV = void 0; +exports.AWS_EXECUTION_ENV = "AWS_EXECUTION_ENV"; +exports.AWS_REGION_ENV = "AWS_REGION"; +exports.AWS_DEFAULT_REGION_ENV = "AWS_DEFAULT_REGION"; +exports.ENV_IMDS_DISABLED = "AWS_EC2_METADATA_DISABLED"; +exports.DEFAULTS_MODE_OPTIONS = ["in-region", "cross-region", "mobile", "standard", "legacy"]; +exports.IMDS_REGION_PATH = "/latest/meta-data/placement/region"; - property(this, 'resultWrapper', shape.resultWrapper || null); - if (shape.payload) { - property(this, 'payload', shape.payload); - } +/***/ }), - if (typeof shape.xmlNamespace === 'string') { - property(this, 'xmlNamespaceUri', shape.xmlNamespace); - } else if (typeof shape.xmlNamespace === 'object') { - property(this, 'xmlNamespacePrefix', shape.xmlNamespace.prefix); - property(this, 'xmlNamespaceUri', 
shape.xmlNamespace.uri); - } -} +/***/ 15577: +/***/ ((__unused_webpack_module, exports) => { -function ListShape(shape, options) { - var self = this, firstInit = !this.isShape; - CompositeShape.apply(this, arguments); +"use strict"; - if (firstInit) { - property(this, 'defaultValue', function() { return []; }); - } +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.NODE_DEFAULTS_MODE_CONFIG_OPTIONS = void 0; +const AWS_DEFAULTS_MODE_ENV = "AWS_DEFAULTS_MODE"; +const AWS_DEFAULTS_MODE_CONFIG = "defaults_mode"; +exports.NODE_DEFAULTS_MODE_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => { + return env[AWS_DEFAULTS_MODE_ENV]; + }, + configFileSelector: (profile) => { + return profile[AWS_DEFAULTS_MODE_CONFIG]; + }, + default: "legacy", +}; - if (shape.member) { - memoizedProperty(this, 'member', function() { - return Shape.create(shape.member, options); - }); - } - if (this.flattened) { - var oldName = this.name; - memoizedProperty(this, 'name', function() { - return self.member.name || oldName; - }); - } -} +/***/ }), -function MapShape(shape, options) { - var firstInit = !this.isShape; - CompositeShape.apply(this, arguments); +/***/ 72429: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - if (firstInit) { - property(this, 'defaultValue', function() { return {}; }); - property(this, 'key', Shape.create({type: 'string'}, options)); - property(this, 'value', Shape.create({type: 'string'}, options)); - } +"use strict"; - if (shape.key) { - memoizedProperty(this, 'key', function() { - return Shape.create(shape.key, options); - }); - } - if (shape.value) { - memoizedProperty(this, 'value', function() { - return Shape.create(shape.value, options); - }); - } -} +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(46217), exports); -function TimestampShape(shape) { - var self = this; - Shape.apply(this, arguments); - - if (shape.timestampFormat) { - property(this, 'timestampFormat', shape.timestampFormat); - } else if (self.isTimestampFormatSet && this.timestampFormat) { - property(this, 'timestampFormat', this.timestampFormat); - } else if (this.location === 'header') { - property(this, 'timestampFormat', 'rfc822'); - } else if (this.location === 'querystring') { - property(this, 'timestampFormat', 'iso8601'); - } else if (this.api) { - switch (this.api.protocol) { - case 'json': - case 'rest-json': - property(this, 'timestampFormat', 'unixTimestamp'); - break; - case 'rest-xml': - case 'query': - case 'ec2': - property(this, 'timestampFormat', 'iso8601'); - break; - } - } - this.toType = function(value) { - if (value === null || value === undefined) return null; - if (typeof value.toUTCString === 'function') return value; - return typeof value === 'string' || typeof value === 'number' ? - util.date.parseTimestamp(value) : null; - }; +/***/ }), - this.toWireFormat = function(value) { - return util.date.format(value, self.timestampFormat); - }; -} +/***/ 46217: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -function StringShape() { - Shape.apply(this, arguments); +"use strict"; - var nullLessProtocols = ['rest-xml', 'query', 'ec2']; - this.toType = function(value) { - value = this.api && nullLessProtocols.indexOf(this.api.protocol) > -1 ? 
- value || '' : value; - if (this.isJsonValue) { - return JSON.parse(value); +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveDefaultsModeConfig = void 0; +const config_resolver_1 = __nccwpck_require__(53098); +const credential_provider_imds_1 = __nccwpck_require__(7477); +const node_config_provider_1 = __nccwpck_require__(33461); +const property_provider_1 = __nccwpck_require__(79721); +const constants_1 = __nccwpck_require__(56470); +const defaultsModeConfig_1 = __nccwpck_require__(15577); +const resolveDefaultsModeConfig = ({ region = (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_REGION_CONFIG_OPTIONS), defaultsMode = (0, node_config_provider_1.loadConfig)(defaultsModeConfig_1.NODE_DEFAULTS_MODE_CONFIG_OPTIONS), } = {}) => (0, property_provider_1.memoize)(async () => { + const mode = typeof defaultsMode === "function" ? await defaultsMode() : defaultsMode; + switch (mode === null || mode === void 0 ? void 0 : mode.toLowerCase()) { + case "auto": + return resolveNodeDefaultsModeAuto(region); + case "in-region": + case "cross-region": + case "mobile": + case "standard": + case "legacy": + return Promise.resolve(mode === null || mode === void 0 ? void 0 : mode.toLocaleLowerCase()); + case undefined: + return Promise.resolve("legacy"); + default: + throw new Error(`Invalid parameter for "defaultsMode", expect ${constants_1.DEFAULTS_MODE_OPTIONS.join(", ")}, got ${mode}`); } +}); +exports.resolveDefaultsModeConfig = resolveDefaultsModeConfig; +const resolveNodeDefaultsModeAuto = async (clientRegion) => { + if (clientRegion) { + const resolvedRegion = typeof clientRegion === "function" ? await clientRegion() : clientRegion; + const inferredRegion = await inferPhysicalRegion(); + if (!inferredRegion) { + return "standard"; + } + if (resolvedRegion === inferredRegion) { + return "in-region"; + } + else { + return "cross-region"; + } + } + return "standard"; +}; +const inferPhysicalRegion = async () => { + var _a; + if (process.env[constants_1.AWS_EXECUTION_ENV] && (process.env[constants_1.AWS_REGION_ENV] || process.env[constants_1.AWS_DEFAULT_REGION_ENV])) { + return (_a = process.env[constants_1.AWS_REGION_ENV]) !== null && _a !== void 0 ? _a : process.env[constants_1.AWS_DEFAULT_REGION_ENV]; + } + if (!process.env[constants_1.ENV_IMDS_DISABLED]) { + try { + const endpoint = await (0, credential_provider_imds_1.getInstanceMetadataEndpoint)(); + return (await (0, credential_provider_imds_1.httpRequest)({ ...endpoint, path: constants_1.IMDS_REGION_PATH })).toString(); + } + catch (e) { + } + } +}; - return value && typeof value.toString === 'function' ? - value.toString() : value; - }; - - this.toWireFormat = function(value) { - return this.isJsonValue ? 
JSON.stringify(value) : value; - }; -} - -function FloatShape() { - Shape.apply(this, arguments); - this.toType = function(value) { - if (value === null || value === undefined) return null; - return parseFloat(value); - }; - this.toWireFormat = this.toType; -} +/***/ }), -function IntegerShape() { - Shape.apply(this, arguments); +/***/ 45364: +/***/ ((__unused_webpack_module, exports) => { - this.toType = function(value) { - if (value === null || value === undefined) return null; - return parseInt(value, 10); - }; - this.toWireFormat = this.toType; -} - -function BinaryShape() { - Shape.apply(this, arguments); - this.toType = function(value) { - var buf = util.base64.decode(value); - if (this.isSensitive && util.isNode() && typeof util.Buffer.alloc === 'function') { - /* Node.js can create a Buffer that is not isolated. - * i.e. buf.byteLength !== buf.buffer.byteLength - * This means that the sensitive data is accessible to anyone with access to buf.buffer. - * If this is the node shared Buffer, then other code within this process _could_ find this secret. - * Copy sensitive data to an isolated Buffer and zero the sensitive data. - * While this is safe to do here, copying this code somewhere else may produce unexpected results. - */ - var secureBuf = util.Buffer.alloc(buf.length, buf); - buf.fill(0); - buf = secureBuf; +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.toHex = exports.fromHex = void 0; +const SHORT_TO_HEX = {}; +const HEX_TO_SHORT = {}; +for (let i = 0; i < 256; i++) { + let encodedByte = i.toString(16).toLowerCase(); + if (encodedByte.length === 1) { + encodedByte = `0${encodedByte}`; + } + SHORT_TO_HEX[i] = encodedByte; + HEX_TO_SHORT[encodedByte] = i; +} +function fromHex(encoded) { + if (encoded.length % 2 !== 0) { + throw new Error("Hex encoded strings must have an even number length"); + } + const out = new Uint8Array(encoded.length / 2); + for (let i = 0; i < encoded.length; i += 2) { + const encodedByte = encoded.slice(i, i + 2).toLowerCase(); + if (encodedByte in HEX_TO_SHORT) { + out[i / 2] = HEX_TO_SHORT[encodedByte]; + } + else { + throw new Error(`Cannot decode unrecognized sequence ${encodedByte} as hexadecimal`); + } } - return buf; - }; - this.toWireFormat = util.base64.encode; + return out; } - -function Base64Shape() { - BinaryShape.apply(this, arguments); +exports.fromHex = fromHex; +function toHex(bytes) { + let out = ""; + for (let i = 0; i < bytes.byteLength; i++) { + out += SHORT_TO_HEX[bytes[i]]; + } + return out; } +exports.toHex = toHex; -function BooleanShape() { - Shape.apply(this, arguments); - this.toType = function(value) { - if (typeof value === 'boolean') return value; - if (value === null || value === undefined) return null; - return value === 'true'; - }; -} +/***/ }), -/** - * @api private - */ -Shape.shapes = { - StructureShape: StructureShape, - ListShape: ListShape, - MapShape: MapShape, - StringShape: StringShape, - BooleanShape: BooleanShape, - Base64Shape: Base64Shape -}; +/***/ 2390: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -/** - * @api private - */ -module.exports = Shape; +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(80149), exports); /***/ }), -/***/ 73639: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var util = __nccwpck_require__(77985); - -var region_utils = __nccwpck_require__(99517); -var isFipsRegion = 
region_utils.isFipsRegion; -var getRealRegion = region_utils.getRealRegion; - -util.isBrowser = function() { return false; }; -util.isNode = function() { return true; }; - -// node.js specific modules -util.crypto.lib = __nccwpck_require__(6113); -util.Buffer = (__nccwpck_require__(14300).Buffer); -util.domain = __nccwpck_require__(13639); -util.stream = __nccwpck_require__(12781); -util.url = __nccwpck_require__(57310); -util.querystring = __nccwpck_require__(63477); -util.environment = 'nodejs'; -util.createEventStream = util.stream.Readable ? - (__nccwpck_require__(69643).createEventStream) : (__nccwpck_require__(63727).createEventStream); -util.realClock = __nccwpck_require__(81370); -util.clientSideMonitoring = { - Publisher: (__nccwpck_require__(66807).Publisher), - configProvider: __nccwpck_require__(91822), -}; -util.iniLoader = (__nccwpck_require__(29697)/* .iniLoader */ .b); -util.getSystemErrorName = (__nccwpck_require__(73837).getSystemErrorName); - -util.loadConfig = function(options) { - var envValue = options.environmentVariableSelector(process.env); - if (envValue !== undefined) { - return envValue; - } +/***/ 80149: +/***/ ((__unused_webpack_module, exports) => { - var configFile = {}; - try { - configFile = util.iniLoader ? util.iniLoader.loadFrom({ - isConfig: true, - filename: process.env[util.sharedConfigFileEnv] - }) : {}; - } catch (e) {} - var sharedFileConfig = configFile[ - process.env.AWS_PROFILE || util.defaultProfile - ] || {}; - var configValue = options.configFileSelector(sharedFileConfig); - if (configValue !== undefined) { - return configValue; - } +"use strict"; - if (typeof options.default === 'function') { - return options.default(); - } - return options.default; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.normalizeProvider = void 0; +const normalizeProvider = (input) => { + if (typeof input === "function") + return input; + const promisified = Promise.resolve(input); + return () => promisified; }; +exports.normalizeProvider = normalizeProvider; -var AWS; -/** - * @api private - */ -module.exports = AWS = __nccwpck_require__(28437); - -__nccwpck_require__(53819); -__nccwpck_require__(36965); -__nccwpck_require__(77360); -__nccwpck_require__(57083); -__nccwpck_require__(74998); -__nccwpck_require__(3498); -__nccwpck_require__(15037); -__nccwpck_require__(80371); - -// Load the xml2js XML parser -AWS.XML.Parser = __nccwpck_require__(96752); - -// Load Node HTTP client -__nccwpck_require__(2310); - -__nccwpck_require__(95417); - -// Load custom credential providers -__nccwpck_require__(11017); -__nccwpck_require__(73379); -__nccwpck_require__(88764); -__nccwpck_require__(10645); -__nccwpck_require__(57714); -__nccwpck_require__(27454); -__nccwpck_require__(13754); -__nccwpck_require__(80371); -__nccwpck_require__(68335); - -// Setup default providers for credentials chain -// If this changes, please update documentation for -// AWS.CredentialProviderChain.defaultProviders in -// credentials/credential_provider_chain.js -AWS.CredentialProviderChain.defaultProviders = [ - function () { return new AWS.EnvironmentCredentials('AWS'); }, - function () { return new AWS.EnvironmentCredentials('AMAZON'); }, - function () { return new AWS.SsoCredentials(); }, - function () { return new AWS.SharedIniFileCredentials(); }, - function () { return new AWS.ECSCredentials(); }, - function () { return new AWS.ProcessCredentials(); }, - function () { return new AWS.TokenFileWebIdentityCredentials(); }, - function () { return new 
AWS.EC2MetadataCredentials(); } -]; +/***/ }), -// Load custom token providers -__nccwpck_require__(82647); -__nccwpck_require__(50126); -__nccwpck_require__(90327); - -// Setup default providers for token chain -// If this changes, please update documentation for -// AWS.TokenProviderChain.defaultProviders in -// token/token_provider_chain.js -AWS.TokenProviderChain.defaultProviders = [ - function () { return new AWS.SSOTokenProvider(); }, -]; +/***/ 65053: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -var getRegion = function() { - var env = process.env; - var region = env.AWS_REGION || env.AMAZON_REGION; - if (env[AWS.util.configOptInEnv]) { - var toCheck = [ - {filename: env[AWS.util.sharedCredentialsFileEnv]}, - {isConfig: true, filename: env[AWS.util.sharedConfigFileEnv]} - ]; - var iniLoader = AWS.util.iniLoader; - while (!region && toCheck.length) { - var configFile = {}; - var fileInfo = toCheck.shift(); - try { - configFile = iniLoader.loadFrom(fileInfo); - } catch (err) { - if (fileInfo.isConfig) throw err; - } - var profile = configFile[env.AWS_PROFILE || AWS.util.defaultProfile]; - region = profile && profile.region; +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AdaptiveRetryStrategy = void 0; +const config_1 = __nccwpck_require__(93435); +const DefaultRateLimiter_1 = __nccwpck_require__(22234); +const StandardRetryStrategy_1 = __nccwpck_require__(48361); +class AdaptiveRetryStrategy { + constructor(maxAttemptsProvider, options) { + this.maxAttemptsProvider = maxAttemptsProvider; + this.mode = config_1.RETRY_MODES.ADAPTIVE; + const { rateLimiter } = options !== null && options !== void 0 ? options : {}; + this.rateLimiter = rateLimiter !== null && rateLimiter !== void 0 ? rateLimiter : new DefaultRateLimiter_1.DefaultRateLimiter(); + this.standardRetryStrategy = new StandardRetryStrategy_1.StandardRetryStrategy(maxAttemptsProvider); } - } - return region; -}; + async acquireInitialRetryToken(retryTokenScope) { + await this.rateLimiter.getSendToken(); + return this.standardRetryStrategy.acquireInitialRetryToken(retryTokenScope); + } + async refreshRetryTokenForRetry(tokenToRenew, errorInfo) { + this.rateLimiter.updateClientSendingRate(errorInfo); + return this.standardRetryStrategy.refreshRetryTokenForRetry(tokenToRenew, errorInfo); + } + recordSuccess(token) { + this.rateLimiter.updateClientSendingRate({}); + this.standardRetryStrategy.recordSuccess(token); + } +} +exports.AdaptiveRetryStrategy = AdaptiveRetryStrategy; -var getBooleanValue = function(value) { - return value === 'true' ? true: value === 'false' ? 
false: undefined; -}; -var USE_FIPS_ENDPOINT_CONFIG_OPTIONS = { - environmentVariableSelector: function(env) { - return getBooleanValue(env['AWS_USE_FIPS_ENDPOINT']); - }, - configFileSelector: function(profile) { - return getBooleanValue(profile['use_fips_endpoint']); - }, - default: false, -}; +/***/ }), -var USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS = { - environmentVariableSelector: function(env) { - return getBooleanValue(env['AWS_USE_DUALSTACK_ENDPOINT']); - }, - configFileSelector: function(profile) { - return getBooleanValue(profile['use_dualstack_endpoint']); - }, - default: false, -}; - -// Update configuration keys -AWS.util.update(AWS.Config.prototype.keys, { - credentials: function () { - var credentials = null; - new AWS.CredentialProviderChain([ - function () { return new AWS.EnvironmentCredentials('AWS'); }, - function () { return new AWS.EnvironmentCredentials('AMAZON'); }, - function () { return new AWS.SharedIniFileCredentials({ disableAssumeRole: true }); } - ]).resolve(function(err, creds) { - if (!err) credentials = creds; - }); - return credentials; - }, - credentialProvider: function() { - return new AWS.CredentialProviderChain(); - }, - logger: function () { - return process.env.AWSJS_DEBUG ? console : null; - }, - region: function() { - var region = getRegion(); - return region ? getRealRegion(region): undefined; - }, - tokenProvider: function() { - return new AWS.TokenProviderChain(); - }, - useFipsEndpoint: function() { - var region = getRegion(); - return isFipsRegion(region) - ? true - : util.loadConfig(USE_FIPS_ENDPOINT_CONFIG_OPTIONS); - }, - useDualstackEndpoint: function() { - return util.loadConfig(USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS); - } -}); +/***/ 25689: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; -// Reset configuration -AWS.config = new AWS.Config(); +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ConfiguredRetryStrategy = void 0; +const constants_1 = __nccwpck_require__(66302); +const StandardRetryStrategy_1 = __nccwpck_require__(48361); +class ConfiguredRetryStrategy extends StandardRetryStrategy_1.StandardRetryStrategy { + constructor(maxAttempts, computeNextBackoffDelay = constants_1.DEFAULT_RETRY_DELAY_BASE) { + super(typeof maxAttempts === "function" ? maxAttempts : async () => maxAttempts); + if (typeof computeNextBackoffDelay === "number") { + this.computeNextBackoffDelay = () => computeNextBackoffDelay; + } + else { + this.computeNextBackoffDelay = computeNextBackoffDelay; + } + } + async refreshRetryTokenForRetry(tokenToRenew, errorInfo) { + const token = await super.refreshRetryTokenForRetry(tokenToRenew, errorInfo); + token.getRetryDelay = () => this.computeNextBackoffDelay(token.getRetryCount()); + return token; + } +} +exports.ConfiguredRetryStrategy = ConfiguredRetryStrategy; /***/ }), -/***/ 99127: -/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { - -var AWS = __nccwpck_require__(28437); +/***/ 22234: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -/** - * @api private - */ -AWS.ParamValidator = AWS.util.inherit({ - /** - * Create a new validator object. - * - * @param validation [Boolean|map] whether input parameters should be - * validated against the operation description before sending the - * request. Pass a map to enable any of the following specific - * validation features: - * - * * **min** [Boolean] — Validates that a value meets the min - * constraint. 
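The ConfiguredRetryStrategy added above accepts either a fixed backoff in milliseconds or a callback that computes the delay from the retry count. A hedged usage sketch follows; the "@smithy/util-retry" package name is an assumption (the bundle only carries numeric module ids), and the client wiring in the trailing comment is purely illustrative:

const { ConfiguredRetryStrategy } = require("@smithy/util-retry");

// Allow up to 4 attempts, waiting an extra second for each retry.
const retryStrategy = new ConfiguredRetryStrategy(
  4,
  (attempt) => 100 + attempt * 1000 // delay in milliseconds
);

// A v3 client could then be constructed with this strategy, e.g.
// new SomeClient({ retryStrategy });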
This is enabled by default when paramValidation is set - * to `true`. - * * **max** [Boolean] — Validates that a value meets the max - * constraint. - * * **pattern** [Boolean] — Validates that a string value matches a - * regular expression. - * * **enum** [Boolean] — Validates that a string value matches one - * of the allowable enum values. - */ - constructor: function ParamValidator(validation) { - if (validation === true || validation === undefined) { - validation = {'min': true}; - } - this.validation = validation; - }, +"use strict"; - validate: function validate(shape, params, context) { - this.errors = []; - this.validateMember(shape, params || {}, context || 'params'); - - if (this.errors.length > 1) { - var msg = this.errors.join('\n* '); - msg = 'There were ' + this.errors.length + - ' validation errors:\n* ' + msg; - throw AWS.util.error(new Error(msg), - {code: 'MultipleValidationErrors', errors: this.errors}); - } else if (this.errors.length === 1) { - throw this.errors[0]; - } else { - return true; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.DefaultRateLimiter = void 0; +const service_error_classification_1 = __nccwpck_require__(6375); +class DefaultRateLimiter { + constructor(options) { + var _a, _b, _c, _d, _e; + this.currentCapacity = 0; + this.enabled = false; + this.lastMaxRate = 0; + this.measuredTxRate = 0; + this.requestCount = 0; + this.lastTimestamp = 0; + this.timeWindow = 0; + this.beta = (_a = options === null || options === void 0 ? void 0 : options.beta) !== null && _a !== void 0 ? _a : 0.7; + this.minCapacity = (_b = options === null || options === void 0 ? void 0 : options.minCapacity) !== null && _b !== void 0 ? _b : 1; + this.minFillRate = (_c = options === null || options === void 0 ? void 0 : options.minFillRate) !== null && _c !== void 0 ? _c : 0.5; + this.scaleConstant = (_d = options === null || options === void 0 ? void 0 : options.scaleConstant) !== null && _d !== void 0 ? _d : 0.4; + this.smooth = (_e = options === null || options === void 0 ? void 0 : options.smooth) !== null && _e !== void 0 ? 
_e : 0.8; + const currentTimeInSeconds = this.getCurrentTimeInSeconds(); + this.lastThrottleTime = currentTimeInSeconds; + this.lastTxRateBucket = Math.floor(this.getCurrentTimeInSeconds()); + this.fillRate = this.minFillRate; + this.maxCapacity = this.minCapacity; + } + getCurrentTimeInSeconds() { + return Date.now() / 1000; + } + async getSendToken() { + return this.acquireTokenBucket(1); + } + async acquireTokenBucket(amount) { + if (!this.enabled) { + return; + } + this.refillTokenBucket(); + if (amount > this.currentCapacity) { + const delay = ((amount - this.currentCapacity) / this.fillRate) * 1000; + await new Promise((resolve) => setTimeout(resolve, delay)); + } + this.currentCapacity = this.currentCapacity - amount; } - }, - - fail: function fail(code, message) { - this.errors.push(AWS.util.error(new Error(message), {code: code})); - }, - - validateStructure: function validateStructure(shape, params, context) { - if (shape.isDocument) return true; - - this.validateType(params, context, ['object'], 'structure'); - var paramName; - for (var i = 0; shape.required && i < shape.required.length; i++) { - paramName = shape.required[i]; - var value = params[paramName]; - if (value === undefined || value === null) { - this.fail('MissingRequiredParameter', - 'Missing required key \'' + paramName + '\' in ' + context); - } + refillTokenBucket() { + const timestamp = this.getCurrentTimeInSeconds(); + if (!this.lastTimestamp) { + this.lastTimestamp = timestamp; + return; + } + const fillAmount = (timestamp - this.lastTimestamp) * this.fillRate; + this.currentCapacity = Math.min(this.maxCapacity, this.currentCapacity + fillAmount); + this.lastTimestamp = timestamp; + } + updateClientSendingRate(response) { + let calculatedRate; + this.updateMeasuredRate(); + if ((0, service_error_classification_1.isThrottlingError)(response)) { + const rateToUse = !this.enabled ? 
this.measuredTxRate : Math.min(this.measuredTxRate, this.fillRate); + this.lastMaxRate = rateToUse; + this.calculateTimeWindow(); + this.lastThrottleTime = this.getCurrentTimeInSeconds(); + calculatedRate = this.cubicThrottle(rateToUse); + this.enableTokenBucket(); + } + else { + this.calculateTimeWindow(); + calculatedRate = this.cubicSuccess(this.getCurrentTimeInSeconds()); + } + const newRate = Math.min(calculatedRate, 2 * this.measuredTxRate); + this.updateTokenBucketRate(newRate); + } + calculateTimeWindow() { + this.timeWindow = this.getPrecise(Math.pow((this.lastMaxRate * (1 - this.beta)) / this.scaleConstant, 1 / 3)); + } + cubicThrottle(rateToUse) { + return this.getPrecise(rateToUse * this.beta); + } + cubicSuccess(timestamp) { + return this.getPrecise(this.scaleConstant * Math.pow(timestamp - this.lastThrottleTime - this.timeWindow, 3) + this.lastMaxRate); + } + enableTokenBucket() { + this.enabled = true; + } + updateTokenBucketRate(newRate) { + this.refillTokenBucket(); + this.fillRate = Math.max(newRate, this.minFillRate); + this.maxCapacity = Math.max(newRate, this.minCapacity); + this.currentCapacity = Math.min(this.currentCapacity, this.maxCapacity); + } + updateMeasuredRate() { + const t = this.getCurrentTimeInSeconds(); + const timeBucket = Math.floor(t * 2) / 2; + this.requestCount++; + if (timeBucket > this.lastTxRateBucket) { + const currentRate = this.requestCount / (timeBucket - this.lastTxRateBucket); + this.measuredTxRate = this.getPrecise(currentRate * this.smooth + this.measuredTxRate * (1 - this.smooth)); + this.requestCount = 0; + this.lastTxRateBucket = timeBucket; + } + } + getPrecise(num) { + return parseFloat(num.toFixed(8)); } +} +exports.DefaultRateLimiter = DefaultRateLimiter; - // validate hash members - for (paramName in params) { - if (!Object.prototype.hasOwnProperty.call(params, paramName)) continue; - var paramValue = params[paramName], - memberShape = shape.members[paramName]; +/***/ }), - if (memberShape !== undefined) { - var memberContext = [context, paramName].join('.'); - this.validateMember(memberShape, paramValue, memberContext); - } else if (paramValue !== undefined && paramValue !== null) { - this.fail('UnexpectedParameter', - 'Unexpected key \'' + paramName + '\' found in ' + context); - } - } +/***/ 48361: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - return true; - }, +"use strict"; - validateMember: function validateMember(shape, param, context) { - switch (shape.type) { - case 'structure': - return this.validateStructure(shape, param, context); - case 'list': - return this.validateList(shape, param, context); - case 'map': - return this.validateMap(shape, param, context); - default: - return this.validateScalar(shape, param, context); +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.StandardRetryStrategy = void 0; +const config_1 = __nccwpck_require__(93435); +const constants_1 = __nccwpck_require__(66302); +const defaultRetryBackoffStrategy_1 = __nccwpck_require__(21337); +const defaultRetryToken_1 = __nccwpck_require__(1127); +class StandardRetryStrategy { + constructor(maxAttempts) { + this.maxAttempts = maxAttempts; + this.mode = config_1.RETRY_MODES.STANDARD; + this.capacity = constants_1.INITIAL_RETRY_TOKENS; + this.retryBackoffStrategy = (0, defaultRetryBackoffStrategy_1.getDefaultRetryBackoffStrategy)(); + this.maxAttemptsProvider = typeof maxAttempts === "function" ? 
maxAttempts : async () => maxAttempts; + } + async acquireInitialRetryToken(retryTokenScope) { + return (0, defaultRetryToken_1.createDefaultRetryToken)({ + retryDelay: constants_1.DEFAULT_RETRY_DELAY_BASE, + retryCount: 0, + }); } - }, - - validateList: function validateList(shape, params, context) { - if (this.validateType(params, context, [Array])) { - this.validateRange(shape, params.length, context, 'list member count'); - // validate array members - for (var i = 0; i < params.length; i++) { - this.validateMember(shape.member, params[i], context + '[' + i + ']'); - } + async refreshRetryTokenForRetry(token, errorInfo) { + const maxAttempts = await this.getMaxAttempts(); + if (this.shouldRetry(token, errorInfo, maxAttempts)) { + const errorType = errorInfo.errorType; + this.retryBackoffStrategy.setDelayBase(errorType === "THROTTLING" ? constants_1.THROTTLING_RETRY_DELAY_BASE : constants_1.DEFAULT_RETRY_DELAY_BASE); + const delayFromErrorType = this.retryBackoffStrategy.computeNextBackoffDelay(token.getRetryCount()); + const retryDelay = errorInfo.retryAfterHint + ? Math.max(errorInfo.retryAfterHint.getTime() - Date.now() || 0, delayFromErrorType) + : delayFromErrorType; + const capacityCost = this.getCapacityCost(errorType); + this.capacity -= capacityCost; + return (0, defaultRetryToken_1.createDefaultRetryToken)({ + retryDelay, + retryCount: token.getRetryCount() + 1, + retryCost: capacityCost, + }); + } + throw new Error("No retry token available"); } - }, - - validateMap: function validateMap(shape, params, context) { - if (this.validateType(params, context, ['object'], 'map')) { - // Build up a count of map members to validate range traits. - var mapCount = 0; - for (var param in params) { - if (!Object.prototype.hasOwnProperty.call(params, param)) continue; - // Validate any map key trait constraints - this.validateMember(shape.key, param, - context + '[key=\'' + param + '\']'); - this.validateMember(shape.value, params[param], - context + '[\'' + param + '\']'); - mapCount++; - } - this.validateRange(shape, mapCount, context, 'map member count'); + recordSuccess(token) { + var _a; + this.capacity = Math.max(constants_1.INITIAL_RETRY_TOKENS, this.capacity + ((_a = token.getRetryCost()) !== null && _a !== void 0 ? _a : constants_1.NO_RETRY_INCREMENT)); } - }, - - validateScalar: function validateScalar(shape, value, context) { - switch (shape.type) { - case null: - case undefined: - case 'string': - return this.validateString(shape, value, context); - case 'base64': - case 'binary': - return this.validatePayload(value, context); - case 'integer': - case 'float': - return this.validateNumber(shape, value, context); - case 'boolean': - return this.validateType(value, context, ['boolean']); - case 'timestamp': - return this.validateType(value, context, [Date, - /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d+)?Z$/, 'number'], - 'Date object, ISO-8601 string, or a UNIX timestamp'); - default: - return this.fail('UnkownType', 'Unhandled type ' + - shape.type + ' for ' + context); + getCapacity() { + return this.capacity; } - }, - - validateString: function validateString(shape, value, context) { - var validTypes = ['string']; - if (shape.isJsonValue) { - validTypes = validTypes.concat(['number', 'object', 'boolean']); + async getMaxAttempts() { + try { + return await this.maxAttemptsProvider(); + } + catch (error) { + console.warn(`Max attempts provider could not resolve. 
Using default of ${config_1.DEFAULT_MAX_ATTEMPTS}`); + return config_1.DEFAULT_MAX_ATTEMPTS; + } } - if (value !== null && this.validateType(value, context, validTypes)) { - this.validateEnum(shape, value, context); - this.validateRange(shape, value.length, context, 'string length'); - this.validatePattern(shape, value, context); - this.validateUri(shape, value, context); + shouldRetry(tokenToRenew, errorInfo, maxAttempts) { + const attempts = tokenToRenew.getRetryCount() + 1; + return (attempts < maxAttempts && + this.capacity >= this.getCapacityCost(errorInfo.errorType) && + this.isRetryableError(errorInfo.errorType)); } - }, - - validateUri: function validateUri(shape, value, context) { - if (shape['location'] === 'uri') { - if (value.length === 0) { - this.fail('UriParameterError', 'Expected uri parameter to have length >= 1,' - + ' but found "' + value +'" for ' + context); - } + getCapacityCost(errorType) { + return errorType === "TRANSIENT" ? constants_1.TIMEOUT_RETRY_COST : constants_1.RETRY_COST; } - }, - - validatePattern: function validatePattern(shape, value, context) { - if (this.validation['pattern'] && shape['pattern'] !== undefined) { - if (!(new RegExp(shape['pattern'])).test(value)) { - this.fail('PatternMatchError', 'Provided value "' + value + '" ' - + 'does not match regex pattern /' + shape['pattern'] + '/ for ' - + context); - } + isRetryableError(errorType) { + return errorType === "THROTTLING" || errorType === "TRANSIENT"; } - }, +} +exports.StandardRetryStrategy = StandardRetryStrategy; - validateRange: function validateRange(shape, value, context, descriptor) { - if (this.validation['min']) { - if (shape['min'] !== undefined && value < shape['min']) { - this.fail('MinRangeError', 'Expected ' + descriptor + ' >= ' - + shape['min'] + ', but found ' + value + ' for ' + context); - } - } - if (this.validation['max']) { - if (shape['max'] !== undefined && value > shape['max']) { - this.fail('MaxRangeError', 'Expected ' + descriptor + ' <= ' - + shape['max'] + ', but found ' + value + ' for ' + context); - } - } - }, - validateEnum: function validateRange(shape, value, context) { - if (this.validation['enum'] && shape['enum'] !== undefined) { - // Fail if the string value is not present in the enum list - if (shape['enum'].indexOf(value) === -1) { - this.fail('EnumError', 'Found string value of ' + value + ', but ' - + 'expected ' + shape['enum'].join('|') + ' for ' + context); - } - } - }, +/***/ }), - validateType: function validateType(value, context, acceptedTypes, type) { - // We will not log an error for null or undefined, but we will return - // false so that callers know that the expected type was not strictly met. 
- if (value === null || value === undefined) return false; - - var foundInvalidType = false; - for (var i = 0; i < acceptedTypes.length; i++) { - if (typeof acceptedTypes[i] === 'string') { - if (typeof value === acceptedTypes[i]) return true; - } else if (acceptedTypes[i] instanceof RegExp) { - if ((value || '').toString().match(acceptedTypes[i])) return true; - } else { - if (value instanceof acceptedTypes[i]) return true; - if (AWS.util.isType(value, acceptedTypes[i])) return true; - if (!type && !foundInvalidType) acceptedTypes = acceptedTypes.slice(); - acceptedTypes[i] = AWS.util.typeName(acceptedTypes[i]); - } - foundInvalidType = true; - } +/***/ 93435: +/***/ ((__unused_webpack_module, exports) => { - var acceptedType = type; - if (!acceptedType) { - acceptedType = acceptedTypes.join(', ').replace(/,([^,]+)$/, ', or$1'); - } +"use strict"; - var vowel = acceptedType.match(/^[aeiou]/i) ? 'n' : ''; - this.fail('InvalidParameterType', 'Expected ' + context + ' to be a' + - vowel + ' ' + acceptedType); - return false; - }, +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.DEFAULT_RETRY_MODE = exports.DEFAULT_MAX_ATTEMPTS = exports.RETRY_MODES = void 0; +var RETRY_MODES; +(function (RETRY_MODES) { + RETRY_MODES["STANDARD"] = "standard"; + RETRY_MODES["ADAPTIVE"] = "adaptive"; +})(RETRY_MODES = exports.RETRY_MODES || (exports.RETRY_MODES = {})); +exports.DEFAULT_MAX_ATTEMPTS = 3; +exports.DEFAULT_RETRY_MODE = RETRY_MODES.STANDARD; - validateNumber: function validateNumber(shape, value, context) { - if (value === null || value === undefined) return; - if (typeof value === 'string') { - var castedValue = parseFloat(value); - if (castedValue.toString() === value) value = castedValue; - } - if (this.validateType(value, context, ['number'])) { - this.validateRange(shape, value, context, 'numeric value'); - } - }, - validatePayload: function validatePayload(value, context) { - if (value === null || value === undefined) return; - if (typeof value === 'string') return; - if (value && typeof value.byteLength === 'number') return; // typed arrays - if (AWS.util.isNode()) { // special check for buffer/stream in Node.js - var Stream = AWS.util.stream.Stream; - if (AWS.util.Buffer.isBuffer(value) || value instanceof Stream) return; - } else { - if (typeof Blob !== void 0 && value instanceof Blob) return; - } +/***/ }), - var types = ['Buffer', 'Stream', 'File', 'Blob', 'ArrayBuffer', 'DataView']; - if (value) { - for (var i = 0; i < types.length; i++) { - if (AWS.util.isType(value, types[i])) return; - if (AWS.util.typeName(value.constructor) === types[i]) return; - } - } +/***/ 66302: +/***/ ((__unused_webpack_module, exports) => { - this.fail('InvalidParameterType', 'Expected ' + context + ' to be a ' + - 'string, Buffer, Stream, Blob, or typed array object'); - } -}); +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.REQUEST_HEADER = exports.INVOCATION_ID_HEADER = exports.NO_RETRY_INCREMENT = exports.TIMEOUT_RETRY_COST = exports.RETRY_COST = exports.INITIAL_RETRY_TOKENS = exports.THROTTLING_RETRY_DELAY_BASE = exports.MAXIMUM_RETRY_DELAY = exports.DEFAULT_RETRY_DELAY_BASE = void 0; +exports.DEFAULT_RETRY_DELAY_BASE = 100; +exports.MAXIMUM_RETRY_DELAY = 20 * 1000; +exports.THROTTLING_RETRY_DELAY_BASE = 500; +exports.INITIAL_RETRY_TOKENS = 500; +exports.RETRY_COST = 5; +exports.TIMEOUT_RETRY_COST = 10; +exports.NO_RETRY_INCREMENT = 1; +exports.INVOCATION_ID_HEADER = "amz-sdk-invocation-id"; +exports.REQUEST_HEADER = 
"amz-sdk-request"; /***/ }), -/***/ 44086: -/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 21337: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -var AWS = __nccwpck_require__(28437); -var rest = AWS.Protocol.Rest; +"use strict"; -/** - * A presigner object can be used to generate presigned urls for the Polly service. - */ -AWS.Polly.Presigner = AWS.util.inherit({ - /** - * Creates a presigner object with a set of configuration options. - * - * @option options params [map] An optional map of parameters to bind to every - * request sent by this service object. - * @option options service [AWS.Polly] An optional pre-configured instance - * of the AWS.Polly service object to use for requests. The object may - * bound parameters used by the presigner. - * @see AWS.Polly.constructor - */ - constructor: function Signer(options) { - options = options || {}; - this.options = options; - this.service = options.service; - this.bindServiceObject(options); - this._operations = {}; - }, +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getDefaultRetryBackoffStrategy = void 0; +const constants_1 = __nccwpck_require__(66302); +const getDefaultRetryBackoffStrategy = () => { + let delayBase = constants_1.DEFAULT_RETRY_DELAY_BASE; + const computeNextBackoffDelay = (attempts) => { + return Math.floor(Math.min(constants_1.MAXIMUM_RETRY_DELAY, Math.random() * 2 ** attempts * delayBase)); + }; + const setDelayBase = (delay) => { + delayBase = delay; + }; + return { + computeNextBackoffDelay, + setDelayBase, + }; +}; +exports.getDefaultRetryBackoffStrategy = getDefaultRetryBackoffStrategy; - /** - * @api private - */ - bindServiceObject: function bindServiceObject(options) { - options = options || {}; - if (!this.service) { - this.service = new AWS.Polly(options); - } else { - var config = AWS.util.copy(this.service.config); - this.service = new this.service.constructor.__super__(config); - this.service.config.params = AWS.util.merge(this.service.config.params || {}, options.params); - } - }, - /** - * @api private - */ - modifyInputMembers: function modifyInputMembers(input) { - // make copies of the input so we don't overwrite the api - // need to be careful to copy anything we access/modify - var modifiedInput = AWS.util.copy(input); - modifiedInput.members = AWS.util.copy(input.members); - AWS.util.each(input.members, function(name, member) { - modifiedInput.members[name] = AWS.util.copy(member); - // update location and locationName - if (!member.location || member.location === 'body') { - modifiedInput.members[name].location = 'querystring'; - modifiedInput.members[name].locationName = name; - } - }); - return modifiedInput; - }, +/***/ }), - /** - * @api private - */ - convertPostToGet: function convertPostToGet(req) { - // convert method - req.httpRequest.method = 'GET'; +/***/ 1127: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - var operation = req.service.api.operations[req.operation]; - // get cached operation input first - var input = this._operations[req.operation]; - if (!input) { - // modify the original input - this._operations[req.operation] = input = this.modifyInputMembers(operation.input); - } +"use strict"; - var uri = rest.generateURI(req.httpRequest.endpoint.path, operation.httpPath, input, req.params); +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createDefaultRetryToken = void 0; +const constants_1 = __nccwpck_require__(66302); +const 
createDefaultRetryToken = ({ retryDelay, retryCount, retryCost, }) => { + const getRetryCount = () => retryCount; + const getRetryDelay = () => Math.min(constants_1.MAXIMUM_RETRY_DELAY, retryDelay); + const getRetryCost = () => retryCost; + return { + getRetryCount, + getRetryDelay, + getRetryCost, + }; +}; +exports.createDefaultRetryToken = createDefaultRetryToken; - req.httpRequest.path = uri; - req.httpRequest.body = ''; - // don't need these headers on a GET request - delete req.httpRequest.headers['Content-Length']; - delete req.httpRequest.headers['Content-Type']; - }, +/***/ }), - /** - * @overload getSynthesizeSpeechUrl(params = {}, [expires = 3600], [callback]) - * Generate a presigned url for {AWS.Polly.synthesizeSpeech}. - * @note You must ensure that you have static or previously resolved - * credentials if you call this method synchronously (with no callback), - * otherwise it may not properly sign the request. If you cannot guarantee - * this (you are using an asynchronous credential provider, i.e., EC2 - * IAM roles), you should always call this method with an asynchronous - * callback. - * @param params [map] parameters to pass to the operation. See the {AWS.Polly.synthesizeSpeech} - * operation for the expected operation parameters. - * @param expires [Integer] (3600) the number of seconds to expire the pre-signed URL operation in. - * Defaults to 1 hour. - * @return [string] if called synchronously (with no callback), returns the signed URL. - * @return [null] nothing is returned if a callback is provided. - * @callback callback function (err, url) - * If a callback is supplied, it is called when a signed URL has been generated. - * @param err [Error] the error object returned from the presigner. - * @param url [String] the signed URL. - * @see AWS.Polly.synthesizeSpeech - */ - getSynthesizeSpeechUrl: function getSynthesizeSpeechUrl(params, expires, callback) { - var self = this; - var request = this.service.makeRequest('synthesizeSpeech', params); - // remove existing build listeners - request.removeAllListeners('build'); - request.on('build', function(req) { - self.convertPostToGet(req); - }); - return request.presign(expires, callback); - } -}); +/***/ 84902: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +"use strict"; -/***/ }), +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(65053), exports); +tslib_1.__exportStar(__nccwpck_require__(25689), exports); +tslib_1.__exportStar(__nccwpck_require__(22234), exports); +tslib_1.__exportStar(__nccwpck_require__(48361), exports); +tslib_1.__exportStar(__nccwpck_require__(93435), exports); +tslib_1.__exportStar(__nccwpck_require__(66302), exports); +tslib_1.__exportStar(__nccwpck_require__(75427), exports); -/***/ 97969: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var util = __nccwpck_require__(77985); -var AWS = __nccwpck_require__(28437); +/***/ }), -/** - * Prepend prefix defined by API model to endpoint that's already - * constructed. This feature does not apply to operations using - * endpoint discovery and can be disabled. 
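The default backoff produced by getDefaultRetryBackoffStrategy above is full jitter over an exponential curve, capped by MAXIMUM_RETRY_DELAY. A small self-contained sketch of that computation; the constant values are copied from the bundled constants module (100 ms base, 20 s cap), everything else is illustrative:

const DEFAULT_RETRY_DELAY_BASE = 100;  // ms
const MAXIMUM_RETRY_DELAY = 20 * 1000; // ms

const computeNextBackoffDelay = (attempts) =>
  Math.floor(Math.min(MAXIMUM_RETRY_DELAY, Math.random() * 2 ** attempts * DEFAULT_RETRY_DELAY_BASE));

for (let attempt = 1; attempt <= 5; attempt++) {
  // e.g. attempt 1 falls in [0, 200) ms, attempt 5 in [0, 3200) ms
  console.log(`attempt ${attempt}: ${computeNextBackoffDelay(attempt)} ms`);
}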
- * @api private - */ -function populateHostPrefix(request) { - var enabled = request.service.config.hostPrefixEnabled; - if (!enabled) return request; - var operationModel = request.service.api.operations[request.operation]; - //don't marshal host prefix when operation has endpoint discovery traits - if (hasEndpointDiscover(request)) return request; - if (operationModel.endpoint && operationModel.endpoint.hostPrefix) { - var hostPrefixNotation = operationModel.endpoint.hostPrefix; - var hostPrefix = expandHostPrefix(hostPrefixNotation, request.params, operationModel.input); - prependEndpointPrefix(request.httpRequest.endpoint, hostPrefix); - validateHostname(request.httpRequest.endpoint.hostname); - } - return request; -} +/***/ 75427: +/***/ ((__unused_webpack_module, exports) => { -/** - * @api private - */ -function hasEndpointDiscover(request) { - var api = request.service.api; - var operationModel = api.operations[request.operation]; - var isEndpointOperation = api.endpointOperation && (api.endpointOperation === util.string.lowerFirst(operationModel.name)); - return (operationModel.endpointDiscoveryRequired !== 'NULL' || isEndpointOperation === true); -} +"use strict"; -/** - * @api private - */ -function expandHostPrefix(hostPrefixNotation, params, shape) { - util.each(shape.members, function(name, member) { - if (member.hostLabel === true) { - if (typeof params[name] !== 'string' || params[name] === '') { - throw util.error(new Error(), { - message: 'Parameter ' + name + ' should be a non-empty string.', - code: 'InvalidParameter' - }); - } - var regex = new RegExp('\\{' + name + '\\}', 'g'); - hostPrefixNotation = hostPrefixNotation.replace(regex, params[name]); - } - }); - return hostPrefixNotation; -} - -/** - * @api private - */ -function prependEndpointPrefix(endpoint, prefix) { - if (endpoint.host) { - endpoint.host = prefix + endpoint.host; - } - if (endpoint.hostname) { - endpoint.hostname = prefix + endpoint.hostname; - } -} - -/** - * @api private - */ -function validateHostname(hostname) { - var labels = hostname.split('.'); - //Reference: https://tools.ietf.org/html/rfc1123#section-2 - var hostPattern = /^[a-zA-Z0-9]{1}$|^[a-zA-Z0-9][a-zA-Z0-9\-]*[a-zA-Z0-9]$/; - util.arrayEach(labels, function(label) { - if (!label.length || label.length < 1 || label.length > 63) { - throw util.error(new Error(), { - code: 'ValidationError', - message: 'Hostname label length should be between 1 to 63 characters, inclusive.' - }); - } - if (!hostPattern.test(label)) { - throw AWS.util.error(new Error(), - {code: 'ValidationError', message: label + ' is not hostname compatible.'}); - } - }); -} - -module.exports = { - populateHostPrefix: populateHostPrefix -}; +Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), -/***/ 30083: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var util = __nccwpck_require__(77985); -var JsonBuilder = __nccwpck_require__(47495); -var JsonParser = __nccwpck_require__(5474); -var populateHostPrefix = (__nccwpck_require__(97969).populateHostPrefix); - -function buildRequest(req) { - var httpRequest = req.httpRequest; - var api = req.service.api; - var target = api.targetPrefix + '.' 
+ api.operations[req.operation].name; - var version = api.jsonVersion || '1.0'; - var input = api.operations[req.operation].input; - var builder = new JsonBuilder(); +/***/ 22094: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - if (version === 1) version = '1.0'; +"use strict"; - if (api.awsQueryCompatible) { - if (!httpRequest.params) { - httpRequest.params = {}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Uint8ArrayBlobAdapter = void 0; +const transforms_1 = __nccwpck_require__(82098); +class Uint8ArrayBlobAdapter extends Uint8Array { + static fromString(source, encoding = "utf-8") { + switch (typeof source) { + case "string": + return (0, transforms_1.transformFromString)(source, encoding); + default: + throw new Error(`Unsupported conversion from ${typeof source} to Uint8ArrayBlobAdapter.`); + } + } + static mutate(source) { + Object.setPrototypeOf(source, Uint8ArrayBlobAdapter.prototype); + return source; + } + transformToString(encoding = "utf-8") { + return (0, transforms_1.transformToString)(this, encoding); } - // because Query protocol does this. - Object.assign(httpRequest.params, req.params); - } - - httpRequest.body = builder.build(req.params || {}, input); - httpRequest.headers['Content-Type'] = 'application/x-amz-json-' + version; - httpRequest.headers['X-Amz-Target'] = target; - - populateHostPrefix(req); } +exports.Uint8ArrayBlobAdapter = Uint8ArrayBlobAdapter; -function extractError(resp) { - var error = {}; - var httpResponse = resp.httpResponse; - error.code = httpResponse.headers['x-amzn-errortype'] || 'UnknownError'; - if (typeof error.code === 'string') { - error.code = error.code.split(':')[0]; - } +/***/ }), - if (httpResponse.body.length > 0) { - try { - var e = JSON.parse(httpResponse.body.toString()); +/***/ 82098: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - var code = e.__type || e.code || e.Code; - if (code) { - error.code = code.split('#').pop(); - } - if (error.code === 'RequestEntityTooLarge') { - error.message = 'Request body must be less than 1 MB'; - } else { - error.message = (e.message || e.Message || null); - } +"use strict"; - // The minimized models do not have error shapes, so - // without expanding the model size, it's not possible - // to validate the response shape (members) or - // check if any are sensitive to logging. - - // Assign the fields as non-enumerable, allowing specific access only. - for (var key in e || {}) { - if (key === 'code' || key === 'message') { - continue; - } - error['[' + key + ']'] = 'See error.' 
+ key + ' for details.'; - Object.defineProperty(error, key, { - value: e[key], - enumerable: false, - writable: true - }); - } - } catch (e) { - error.statusCode = httpResponse.statusCode; - error.message = httpResponse.statusMessage; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.transformFromString = exports.transformToString = void 0; +const util_base64_1 = __nccwpck_require__(75600); +const util_utf8_1 = __nccwpck_require__(41895); +const Uint8ArrayBlobAdapter_1 = __nccwpck_require__(22094); +function transformToString(payload, encoding = "utf-8") { + if (encoding === "base64") { + return (0, util_base64_1.toBase64)(payload); } - } else { - error.statusCode = httpResponse.statusCode; - error.message = httpResponse.statusCode.toString(); - } - - resp.error = util.error(new Error(), error); + return (0, util_utf8_1.toUtf8)(payload); } - -function extractData(resp) { - var body = resp.httpResponse.body.toString() || '{}'; - if (resp.request.service.config.convertResponseTypes === false) { - resp.data = JSON.parse(body); - } else { - var operation = resp.request.service.api.operations[resp.request.operation]; - var shape = operation.output || {}; - var parser = new JsonParser(); - resp.data = parser.parse(body, shape); - } +exports.transformToString = transformToString; +function transformFromString(str, encoding) { + if (encoding === "base64") { + return Uint8ArrayBlobAdapter_1.Uint8ArrayBlobAdapter.mutate((0, util_base64_1.fromBase64)(str)); + } + return Uint8ArrayBlobAdapter_1.Uint8ArrayBlobAdapter.mutate((0, util_utf8_1.fromUtf8)(str)); } - -/** - * @api private - */ -module.exports = { - buildRequest: buildRequest, - extractError: extractError, - extractData: extractData -}; +exports.transformFromString = transformFromString; /***/ }), -/***/ 90761: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var AWS = __nccwpck_require__(28437); -var util = __nccwpck_require__(77985); -var QueryParamSerializer = __nccwpck_require__(45175); -var Shape = __nccwpck_require__(71349); -var populateHostPrefix = (__nccwpck_require__(97969).populateHostPrefix); - -function buildRequest(req) { - var operation = req.service.api.operations[req.operation]; - var httpRequest = req.httpRequest; - httpRequest.headers['Content-Type'] = - 'application/x-www-form-urlencoded; charset=utf-8'; - httpRequest.params = { - Version: req.service.api.apiVersion, - Action: operation.name - }; - - // convert the request parameters into a list of query params, - // e.g. Deeply.NestedParam.0.Name=value - var builder = new QueryParamSerializer(); - builder.serialize(req.params, operation.input, function(name, value) { - httpRequest.params[name] = value; - }); - httpRequest.body = util.queryParamsToString(httpRequest.params); - - populateHostPrefix(req); -} +/***/ 23636: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -function extractError(resp) { - var data, body = resp.httpResponse.body.toString(); - if (body.match(' { + const { base64Encoder, bodyLengthChecker, checksumAlgorithmFn, checksumLocationName, streamHasher } = options; + const checksumRequired = base64Encoder !== undefined && + checksumAlgorithmFn !== undefined && + checksumLocationName !== undefined && + streamHasher !== undefined; + const digest = checksumRequired ? 
streamHasher(checksumAlgorithmFn, readableStream) : undefined; + const awsChunkedEncodingStream = new stream_1.Readable({ read: () => { } }); + readableStream.on("data", (data) => { + const length = bodyLengthChecker(data) || 0; + awsChunkedEncodingStream.push(`${length.toString(16)}\r\n`); + awsChunkedEncodingStream.push(data); + awsChunkedEncodingStream.push("\r\n"); }); - } else { - resp.error = util.error(new Error(), { - code: resp.httpResponse.statusCode, - message: null + readableStream.on("end", async () => { + awsChunkedEncodingStream.push(`0\r\n`); + if (checksumRequired) { + const checksum = base64Encoder(await digest); + awsChunkedEncodingStream.push(`${checksumLocationName}:${checksum}\r\n`); + awsChunkedEncodingStream.push(`\r\n`); + } + awsChunkedEncodingStream.push(null); }); - } -} - -function extractData(resp) { - var req = resp.request; - var operation = req.service.api.operations[req.operation]; - var shape = operation.output || {}; - var origRules = shape; - - if (origRules.resultWrapper) { - var tmp = Shape.create({type: 'structure'}); - tmp.members[origRules.resultWrapper] = shape; - tmp.memberNames = [origRules.resultWrapper]; - util.property(shape, 'name', shape.resultWrapper); - shape = tmp; - } - - var parser = new AWS.XML.Parser(); - - // TODO: Refactor XML Parser to parse RequestId from response. - if (shape && shape.members && !shape.members._XAMZRequestId) { - var requestIdShape = Shape.create( - { type: 'string' }, - { api: { protocol: 'query' } }, - 'requestId' - ); - shape.members._XAMZRequestId = requestIdShape; - } + return awsChunkedEncodingStream; +}; +exports.getAwsChunkedEncodingStream = getAwsChunkedEncodingStream; - var data = parser.parse(resp.httpResponse.body.toString(), shape); - resp.requestId = data._XAMZRequestId || data.requestId; - if (data._XAMZRequestId) delete data._XAMZRequestId; +/***/ }), - if (origRules.resultWrapper) { - if (data[origRules.resultWrapper]) { - util.update(data, data[origRules.resultWrapper]); - delete data[origRules.resultWrapper]; - } - } +/***/ 96607: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - resp.data = data; -} +"use strict"; -/** - * @api private - */ -module.exports = { - buildRequest: buildRequest, - extractError: extractError, - extractData: extractData -}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(22094), exports); +tslib_1.__exportStar(__nccwpck_require__(23636), exports); +tslib_1.__exportStar(__nccwpck_require__(4515), exports); /***/ }), -/***/ 98200: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 4515: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -var util = __nccwpck_require__(77985); -var populateHostPrefix = (__nccwpck_require__(97969).populateHostPrefix); - -function populateMethod(req) { - req.httpRequest.method = req.service.api.operations[req.operation].httpMethod; -} - -function generateURI(endpointPath, operationPath, input, params) { - var uri = [endpointPath, operationPath].join('/'); - uri = uri.replace(/\/+/g, '/'); - - var queryString = {}, queryStringSet = false; - util.each(input.members, function (name, member) { - var paramValue = params[name]; - if (paramValue === null || paramValue === undefined) return; - if (member.location === 'uri') { - var regex = new RegExp('\\{' + member.name + '(\\+)?\\}'); - uri = uri.replace(regex, function(_, plus) { - var fn = plus ? 
util.uriEscapePath : util.uriEscape; - return fn(String(paramValue)); - }); - } else if (member.location === 'querystring') { - queryStringSet = true; - - if (member.type === 'list') { - queryString[member.name] = paramValue.map(function(val) { - return util.uriEscape(member.member.toWireFormat(val).toString()); - }); - } else if (member.type === 'map') { - util.each(paramValue, function(key, value) { - if (Array.isArray(value)) { - queryString[key] = value.map(function(val) { - return util.uriEscape(String(val)); - }); - } else { - queryString[key] = util.uriEscape(String(value)); - } - }); - } else { - queryString[member.name] = util.uriEscape(member.toWireFormat(paramValue).toString()); - } - } - }); +"use strict"; - if (queryStringSet) { - uri += (uri.indexOf('?') >= 0 ? '&' : '?'); - var parts = []; - util.arrayEach(Object.keys(queryString).sort(), function(key) { - if (!Array.isArray(queryString[key])) { - queryString[key] = [queryString[key]]; - } - for (var i = 0; i < queryString[key].length; i++) { - parts.push(util.uriEscape(String(key)) + '=' + queryString[key][i]); - } +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.sdkStreamMixin = void 0; +const node_http_handler_1 = __nccwpck_require__(20258); +const util_buffer_from_1 = __nccwpck_require__(31381); +const stream_1 = __nccwpck_require__(12781); +const util_1 = __nccwpck_require__(73837); +const ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED = "The stream has already been transformed."; +const sdkStreamMixin = (stream) => { + var _a, _b; + if (!(stream instanceof stream_1.Readable)) { + const name = ((_b = (_a = stream === null || stream === void 0 ? void 0 : stream.__proto__) === null || _a === void 0 ? void 0 : _a.constructor) === null || _b === void 0 ? void 0 : _b.name) || stream; + throw new Error(`Unexpected stream implementation, expect Stream.Readable instance, got ${name}`); + } + let transformed = false; + const transformToByteArray = async () => { + if (transformed) { + throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED); + } + transformed = true; + return await (0, node_http_handler_1.streamCollector)(stream); + }; + return Object.assign(stream, { + transformToByteArray, + transformToString: async (encoding) => { + const buf = await transformToByteArray(); + if (encoding === undefined || Buffer.isEncoding(encoding)) { + return (0, util_buffer_from_1.fromArrayBuffer)(buf.buffer, buf.byteOffset, buf.byteLength).toString(encoding); + } + else { + const decoder = new util_1.TextDecoder(encoding); + return decoder.decode(buf); + } + }, + transformToWebStream: () => { + if (transformed) { + throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED); + } + if (stream.readableFlowing !== null) { + throw new Error("The stream has been consumed by other callbacks."); + } + if (typeof stream_1.Readable.toWeb !== "function") { + throw new Error("Readable.toWeb() is not supported. 
Please make sure you are using Node.js >= 17.0.0, or polyfill is available."); + } + transformed = true; + return stream_1.Readable.toWeb(stream); + }, }); - uri += parts.join('&'); - } - - return uri; -} - -function populateURI(req) { - var operation = req.service.api.operations[req.operation]; - var input = operation.input; +}; +exports.sdkStreamMixin = sdkStreamMixin; - var uri = generateURI(req.httpRequest.endpoint.path, operation.httpPath, input, req.params); - req.httpRequest.path = uri; -} -function populateHeaders(req) { - var operation = req.service.api.operations[req.operation]; - util.each(operation.input.members, function (name, member) { - var value = req.params[name]; - if (value === null || value === undefined) return; +/***/ }), - if (member.location === 'headers' && member.type === 'map') { - util.each(value, function(key, memberValue) { - req.httpRequest.headers[member.name + key] = memberValue; - }); - } else if (member.location === 'header') { - value = member.toWireFormat(value).toString(); - if (member.isJsonValue) { - value = util.base64.encode(value); - } - req.httpRequest.headers[member.name] = value; - } - }); -} +/***/ 26174: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -function buildRequest(req) { - populateMethod(req); - populateURI(req); - populateHeaders(req); - populateHostPrefix(req); -} +"use strict"; -function extractError() { -} +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.escapeUriPath = void 0; +const escape_uri_1 = __nccwpck_require__(60010); +const escapeUriPath = (uri) => uri.split("/").map(escape_uri_1.escapeUri).join("/"); +exports.escapeUriPath = escapeUriPath; -function extractData(resp) { - var req = resp.request; - var data = {}; - var r = resp.httpResponse; - var operation = req.service.api.operations[req.operation]; - var output = operation.output; - // normalize headers names to lower-cased keys for matching - var headers = {}; - util.each(r.headers, function (k, v) { - headers[k.toLowerCase()] = v; - }); +/***/ }), - util.each(output.members, function(name, member) { - var header = (member.name || name).toLowerCase(); - if (member.location === 'headers' && member.type === 'map') { - data[name] = {}; - var location = member.isLocationName ? member.name : ''; - var pattern = new RegExp('^' + location + '(.+)', 'i'); - util.each(r.headers, function (k, v) { - var result = k.match(pattern); - if (result !== null) { - data[name][result[1]] = v; - } - }); - } else if (member.location === 'header') { - if (headers[header] !== undefined) { - var value = member.isJsonValue ? 
- util.base64.decode(headers[header]) : - headers[header]; - data[name] = member.toType(value); - } - } else if (member.location === 'statusCode') { - data[name] = parseInt(r.statusCode, 10); - } - }); +/***/ 60010: +/***/ ((__unused_webpack_module, exports) => { - resp.data = data; -} +"use strict"; -/** - * @api private - */ -module.exports = { - buildRequest: buildRequest, - extractError: extractError, - extractData: extractData, - generateURI: generateURI -}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.escapeUri = void 0; +const escapeUri = (uri) => encodeURIComponent(uri).replace(/[!'()*]/g, hexEncode); +exports.escapeUri = escapeUri; +const hexEncode = (c) => `%${c.charCodeAt(0).toString(16).toUpperCase()}`; /***/ }), -/***/ 5883: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var util = __nccwpck_require__(77985); -var Rest = __nccwpck_require__(98200); -var Json = __nccwpck_require__(30083); -var JsonBuilder = __nccwpck_require__(47495); -var JsonParser = __nccwpck_require__(5474); - -var METHODS_WITHOUT_BODY = ['GET', 'HEAD', 'DELETE']; - -function unsetContentLength(req) { - var payloadMember = util.getRequestPayloadShape(req); - if ( - payloadMember === undefined && - METHODS_WITHOUT_BODY.indexOf(req.httpRequest.method) >= 0 - ) { - delete req.httpRequest.headers['Content-Length']; - } -} - -function populateBody(req) { - var builder = new JsonBuilder(); - var input = req.service.api.operations[req.operation].input; +/***/ 54197: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - if (input.payload) { - var params = {}; - var payloadShape = input.members[input.payload]; - params = req.params[input.payload]; +"use strict"; - if (payloadShape.type === 'structure') { - req.httpRequest.body = builder.build(params || {}, payloadShape); - applyContentTypeHeader(req); - } else if (params !== undefined) { - // non-JSON payload - req.httpRequest.body = params; - if (payloadShape.type === 'binary' || payloadShape.isStreaming) { - applyContentTypeHeader(req, true); - } - } - } else { - req.httpRequest.body = builder.build(req.params, input); - applyContentTypeHeader(req); - } -} +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(60010), exports); +tslib_1.__exportStar(__nccwpck_require__(26174), exports); -function applyContentTypeHeader(req, isBinary) { - if (!req.httpRequest.headers['Content-Type']) { - var type = isBinary ? 'binary/octet-stream' : 'application/json'; - req.httpRequest.headers['Content-Type'] = type; - } -} -function buildRequest(req) { - Rest.buildRequest(req); +/***/ }), - // never send body payload on GET/HEAD/DELETE - if (METHODS_WITHOUT_BODY.indexOf(req.httpRequest.method) < 0) { - populateBody(req); - } -} +/***/ 45917: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -function extractError(resp) { - Json.extractError(resp); -} - -function extractData(resp) { - Rest.extractData(resp); - - var req = resp.request; - var operation = req.service.api.operations[req.operation]; - var rules = req.service.api.operations[req.operation].output || {}; - var parser; - var hasEventOutput = operation.hasEventOutput; - - if (rules.payload) { - var payloadMember = rules.members[rules.payload]; - var body = resp.httpResponse.body; - if (payloadMember.isEventStream) { - parser = new JsonParser(); - resp.data[payload] = util.createEventStream( - AWS.HttpClient.streamsApiVersion === 2 ? 
resp.httpResponse.stream : body, - parser, - payloadMember - ); - } else if (payloadMember.type === 'structure' || payloadMember.type === 'list') { - var parser = new JsonParser(); - resp.data[rules.payload] = parser.parse(body, payloadMember); - } else if (payloadMember.type === 'binary' || payloadMember.isStreaming) { - resp.data[rules.payload] = body; - } else { - resp.data[rules.payload] = payloadMember.toType(body); - } - } else { - var data = resp.data; - Json.extractData(resp); - resp.data = util.merge(data, resp.data); - } -} +"use strict"; -/** - * @api private - */ -module.exports = { - buildRequest: buildRequest, - extractError: extractError, - extractData: extractData, - unsetContentLength: unsetContentLength +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.fromUtf8 = void 0; +const util_buffer_from_1 = __nccwpck_require__(31381); +const fromUtf8 = (input) => { + const buf = (0, util_buffer_from_1.fromString)(input, "utf8"); + return new Uint8Array(buf.buffer, buf.byteOffset, buf.byteLength / Uint8Array.BYTES_PER_ELEMENT); }; +exports.fromUtf8 = fromUtf8; /***/ }), -/***/ 15143: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var AWS = __nccwpck_require__(28437); -var util = __nccwpck_require__(77985); -var Rest = __nccwpck_require__(98200); - -function populateBody(req) { - var input = req.service.api.operations[req.operation].input; - var builder = new AWS.XML.Builder(); - var params = req.params; - - var payload = input.payload; - if (payload) { - var payloadMember = input.members[payload]; - params = params[payload]; - if (params === undefined) return; +/***/ 41895: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - if (payloadMember.type === 'structure') { - var rootElement = payloadMember.name; - req.httpRequest.body = builder.toXML(params, payloadMember, rootElement, true); - } else { // non-xml payload - req.httpRequest.body = params; - } - } else { - req.httpRequest.body = builder.toXML(params, input, input.name || - input.shape || util.string.upperFirst(req.operation) + 'Request'); - } -} +"use strict"; -function buildRequest(req) { - Rest.buildRequest(req); +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(4351); +tslib_1.__exportStar(__nccwpck_require__(45917), exports); +tslib_1.__exportStar(__nccwpck_require__(95470), exports); +tslib_1.__exportStar(__nccwpck_require__(99960), exports); - // never send body payload on GET/HEAD - if (['GET', 'HEAD'].indexOf(req.httpRequest.method) < 0) { - populateBody(req); - } -} -function extractError(resp) { - Rest.extractError(resp); +/***/ }), - var data; - try { - data = new AWS.XML.Parser().parse(resp.httpResponse.body.toString()); - } catch (e) { - data = { - Code: resp.httpResponse.statusCode, - Message: resp.httpResponse.statusMessage - }; - } +/***/ 95470: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - if (data.Errors) data = data.Errors; - if (data.Error) data = data.Error; - if (data.Code) { - resp.error = util.error(new Error(), { - code: data.Code, - message: data.Message - }); - } else { - resp.error = util.error(new Error(), { - code: resp.httpResponse.statusCode, - message: null - }); - } -} +"use strict"; -function extractData(resp) { - Rest.extractData(resp); - - var parser; - var req = resp.request; - var body = resp.httpResponse.body; - var operation = req.service.api.operations[req.operation]; - var output = operation.output; - - var hasEventOutput = 
operation.hasEventOutput; - - var payload = output.payload; - if (payload) { - var payloadMember = output.members[payload]; - if (payloadMember.isEventStream) { - parser = new AWS.XML.Parser(); - resp.data[payload] = util.createEventStream( - AWS.HttpClient.streamsApiVersion === 2 ? resp.httpResponse.stream : resp.httpResponse.body, - parser, - payloadMember - ); - } else if (payloadMember.type === 'structure') { - parser = new AWS.XML.Parser(); - resp.data[payload] = parser.parse(body.toString(), payloadMember); - } else if (payloadMember.type === 'binary' || payloadMember.isStreaming) { - resp.data[payload] = body; - } else { - resp.data[payload] = payloadMember.toType(body); +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.toUint8Array = void 0; +const fromUtf8_1 = __nccwpck_require__(45917); +const toUint8Array = (data) => { + if (typeof data === "string") { + return (0, fromUtf8_1.fromUtf8)(data); } - } else if (body.length > 0) { - parser = new AWS.XML.Parser(); - var data = parser.parse(body.toString(), output); - util.update(resp.data, data); - } -} - -/** - * @api private - */ -module.exports = { - buildRequest: buildRequest, - extractError: extractError, - extractData: extractData + if (ArrayBuffer.isView(data)) { + return new Uint8Array(data.buffer, data.byteOffset, data.byteLength / Uint8Array.BYTES_PER_ELEMENT); + } + return new Uint8Array(data); }; +exports.toUint8Array = toUint8Array; /***/ }), -/***/ 91822: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var AWS = __nccwpck_require__(28437); - -/** - * Resolve client-side monitoring configuration from either environmental variables - * or shared config file. Configurations from environmental variables have higher priority - * than those from shared config file. The resolver will try to read the shared config file - * no matter whether the AWS_SDK_LOAD_CONFIG variable is set. - * @api private - */ -function resolveMonitoringConfig() { - var config = { - port: undefined, - clientId: undefined, - enabled: undefined, - host: undefined - }; - if (fromEnvironment(config) || fromConfigFile(config)) return toJSType(config); - return toJSType(config); -} - -/** - * Resolve configurations from environmental variables. 
- * @param {object} client side monitoring config object needs to be resolved - * @returns {boolean} whether resolving configurations is done - * @api private - */ -function fromEnvironment(config) { - config.port = config.port || process.env.AWS_CSM_PORT; - config.enabled = config.enabled || process.env.AWS_CSM_ENABLED; - config.clientId = config.clientId || process.env.AWS_CSM_CLIENT_ID; - config.host = config.host || process.env.AWS_CSM_HOST; - return config.port && config.enabled && config.clientId && config.host || - ['false', '0'].indexOf(config.enabled) >= 0; //no need to read shared config file if explicitely disabled -} - -/** - * Resolve cofigurations from shared config file with specified role name - * @param {object} client side monitoring config object needs to be resolved - * @returns {boolean} whether resolving configurations is done - * @api private - */ -function fromConfigFile(config) { - var sharedFileConfig; - try { - var configFile = AWS.util.iniLoader.loadFrom({ - isConfig: true, - filename: process.env[AWS.util.sharedConfigFileEnv] - }); - var sharedFileConfig = configFile[ - process.env.AWS_PROFILE || AWS.util.defaultProfile - ]; - } catch (err) { - return false; - } - if (!sharedFileConfig) return config; - config.port = config.port || sharedFileConfig.csm_port; - config.enabled = config.enabled || sharedFileConfig.csm_enabled; - config.clientId = config.clientId || sharedFileConfig.csm_client_id; - config.host = config.host || sharedFileConfig.csm_host; - return config.port && config.enabled && config.clientId && config.host; -} +/***/ 99960: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -/** - * Transfer the resolved configuration value to proper types: port as number, enabled - * as boolean and clientId as string. The 'enabled' flag is valued to false when set - * to 'false' or '0'. - * @param {object} resolved client side monitoring config - * @api private - */ -function toJSType(config) { - //config.XXX is either undefined or string - var falsyNotations = ['false', '0', undefined]; - if (!config.enabled || falsyNotations.indexOf(config.enabled.toLowerCase()) >= 0) { - config.enabled = false; - } else { - config.enabled = true; - } - config.port = config.port ? parseInt(config.port, 10) : undefined; - return config; -} +"use strict"; -module.exports = resolveMonitoringConfig; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.toUtf8 = void 0; +const util_buffer_from_1 = __nccwpck_require__(31381); +const toUtf8 = (input) => (0, util_buffer_from_1.fromArrayBuffer)(input.buffer, input.byteOffset, input.byteLength).toString("utf8"); +exports.toUtf8 = toUtf8; /***/ }), -/***/ 66807: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 49690: +/***/ (function(module, __unused_webpack_exports, __nccwpck_require__) { -var util = (__nccwpck_require__(28437).util); -var dgram = __nccwpck_require__(71891); -var stringToBuffer = util.buffer.toBuffer; - -var MAX_MESSAGE_SIZE = 1024 * 8; // 8 KB - -/** - * Publishes metrics via udp. 
- * @param {object} options Paramters for Publisher constructor - * @param {number} [options.port = 31000] Port number - * @param {string} [options.clientId = ''] Client Identifier - * @param {boolean} [options.enabled = false] enable sending metrics datagram - * @api private - */ -function Publisher(options) { - // handle configuration - options = options || {}; - this.enabled = options.enabled || false; - this.port = options.port || 31000; - this.clientId = options.clientId || ''; - this.address = options.host || '127.0.0.1'; - if (this.clientId.length > 255) { - // ClientId has a max length of 255 - this.clientId = this.clientId.substr(0, 255); - } - this.messagesInFlight = 0; -} - -Publisher.prototype.fieldsToTrim = { - UserAgent: 256, - SdkException: 128, - SdkExceptionMessage: 512, - AwsException: 128, - AwsExceptionMessage: 512, - FinalSdkException: 128, - FinalSdkExceptionMessage: 512, - FinalAwsException: 128, - FinalAwsExceptionMessage: 512 - -}; - -/** - * Trims fields that have a specified max length. - * @param {object} event ApiCall or ApiCallAttempt event. - * @returns {object} - * @api private - */ -Publisher.prototype.trimFields = function(event) { - var trimmableFields = Object.keys(this.fieldsToTrim); - for (var i = 0, iLen = trimmableFields.length; i < iLen; i++) { - var field = trimmableFields[i]; - if (event.hasOwnProperty(field)) { - var maxLength = this.fieldsToTrim[field]; - var value = event[field]; - if (value && value.length > maxLength) { - event[field] = value.substr(0, maxLength); - } - } - } - return event; -}; - -/** - * Handles ApiCall and ApiCallAttempt events. - * @param {Object} event apiCall or apiCallAttempt event. - * @api private - */ -Publisher.prototype.eventHandler = function(event) { - // set the clientId - event.ClientId = this.clientId; - - this.trimFields(event); - - var message = stringToBuffer(JSON.stringify(event)); - if (!this.enabled || message.length > MAX_MESSAGE_SIZE) { - // drop the message if publisher not enabled or it is too large - return; - } - - this.publishDatagram(message); -}; - -/** - * Publishes message to an agent. - * @param {Buffer} message JSON message to send to agent. - * @api private - */ -Publisher.prototype.publishDatagram = function(message) { - var self = this; - var client = this.getClient(); - - this.messagesInFlight++; - this.client.send(message, 0, message.length, this.port, this.address, function(err, bytes) { - if (--self.messagesInFlight <= 0) { - // destroy existing client so the event loop isn't kept open - self.destroyClient(); - } - }); -}; - -/** - * Returns an existing udp socket, or creates one if it doesn't already exist. - * @api private - */ -Publisher.prototype.getClient = function() { - if (!this.client) { - this.client = dgram.createSocket('udp4'); - } - return this.client; -}; - -/** - * Destroys the udp socket. 
- * @api private - */ -Publisher.prototype.destroyClient = function() { - if (this.client) { - this.client.close(); - this.client = void 0; - } -}; - -module.exports = { - Publisher: Publisher -}; - - -/***/ }), - -/***/ 45175: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var util = __nccwpck_require__(77985); - -function QueryParamSerializer() { -} - -QueryParamSerializer.prototype.serialize = function(params, shape, fn) { - serializeStructure('', params, shape, fn); -}; - -function ucfirst(shape) { - if (shape.isQueryName || shape.api.protocol !== 'ec2') { - return shape.name; - } else { - return shape.name[0].toUpperCase() + shape.name.substr(1); - } -} - -function serializeStructure(prefix, struct, rules, fn) { - util.each(rules.members, function(name, member) { - var value = struct[name]; - if (value === null || value === undefined) return; - - var memberName = ucfirst(member); - memberName = prefix ? prefix + '.' + memberName : memberName; - serializeMember(memberName, value, member, fn); - }); -} - -function serializeMap(name, map, rules, fn) { - var i = 1; - util.each(map, function (key, value) { - var prefix = rules.flattened ? '.' : '.entry.'; - var position = prefix + (i++) + '.'; - var keyName = position + (rules.key.name || 'key'); - var valueName = position + (rules.value.name || 'value'); - serializeMember(name + keyName, key, rules.key, fn); - serializeMember(name + valueName, value, rules.value, fn); - }); -} - -function serializeList(name, list, rules, fn) { - var memberRules = rules.member || {}; - - if (list.length === 0) { - fn.call(this, name, null); - return; - } - - util.arrayEach(list, function (v, n) { - var suffix = '.' + (n + 1); - if (rules.api.protocol === 'ec2') { - // Do nothing for EC2 - suffix = suffix + ''; // make linter happy - } else if (rules.flattened) { - if (memberRules.name) { - var parts = name.split('.'); - parts.pop(); - parts.push(ucfirst(memberRules)); - name = parts.join('.'); - } - } else { - suffix = '.' + (memberRules.name ? memberRules.name : 'member') + suffix; - } - serializeMember(name + suffix, v, memberRules, fn); - }); -} - -function serializeMember(name, value, rules, fn) { - if (value === null || value === undefined) return; - if (rules.type === 'structure') { - serializeStructure(name, value, rules, fn); - } else if (rules.type === 'list') { - serializeList(name, value, rules, fn); - } else if (rules.type === 'map') { - serializeMap(name, value, rules, fn); - } else { - fn(name, rules.toWireFormat(value).toString()); - } -} - -/** - * @api private - */ -module.exports = QueryParamSerializer; - - -/***/ }), - -/***/ 16612: -/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { - -var AWS = __nccwpck_require__(28437); - -/** - * @api private - */ -var service = null; - -/** - * @api private - */ -var api = { - signatureVersion: 'v4', - signingName: 'rds-db', - operations: {} -}; - -/** - * @api private - */ -var requiredAuthTokenOptions = { - region: 'string', - hostname: 'string', - port: 'number', - username: 'string' -}; - -/** - * A signer object can be used to generate an auth token to a database. - */ -AWS.RDS.Signer = AWS.util.inherit({ - /** - * Creates a signer object can be used to generate an auth token. - * - * @option options credentials [AWS.Credentials] the AWS credentials - * to sign requests with. Uses the default credential provider chain - * if not specified. - * @option options hostname [String] the hostname of the database to connect to. 
- * @option options port [Number] the port number the database is listening on. - * @option options region [String] the region the database is located in. - * @option options username [String] the username to login as. - * @example Passing in options to constructor - * var signer = new AWS.RDS.Signer({ - * credentials: new AWS.SharedIniFileCredentials({profile: 'default'}), - * region: 'us-east-1', - * hostname: 'db.us-east-1.rds.amazonaws.com', - * port: 8000, - * username: 'name' - * }); - */ - constructor: function Signer(options) { - this.options = options || {}; - }, - - /** - * @api private - * Strips the protocol from a url. - */ - convertUrlToAuthToken: function convertUrlToAuthToken(url) { - // we are always using https as the protocol - var protocol = 'https://'; - if (url.indexOf(protocol) === 0) { - return url.substring(protocol.length); - } - }, - - /** - * @overload getAuthToken(options = {}, [callback]) - * Generate an auth token to a database. - * @note You must ensure that you have static or previously resolved - * credentials if you call this method synchronously (with no callback), - * otherwise it may not properly sign the request. If you cannot guarantee - * this (you are using an asynchronous credential provider, i.e., EC2 - * IAM roles), you should always call this method with an asynchronous - * callback. - * - * @param options [map] The fields to use when generating an auth token. - * Any options specified here will be merged on top of any options passed - * to AWS.RDS.Signer: - * - * * **credentials** (AWS.Credentials) — the AWS credentials - * to sign requests with. Uses the default credential provider chain - * if not specified. - * * **hostname** (String) — the hostname of the database to connect to. - * * **port** (Number) — the port number the database is listening on. - * * **region** (String) — the region the database is located in. - * * **username** (String) — the username to login as. - * @return [String] if called synchronously (with no callback), returns the - * auth token. - * @return [null] nothing is returned if a callback is provided. - * @callback callback function (err, token) - * If a callback is supplied, it is called when an auth token has been generated. - * @param err [Error] the error object returned from the signer. - * @param token [String] the auth token. 
- * - * @example Generating an auth token synchronously - * var signer = new AWS.RDS.Signer({ - * // configure options - * region: 'us-east-1', - * username: 'default', - * hostname: 'db.us-east-1.amazonaws.com', - * port: 8000 - * }); - * var token = signer.getAuthToken({ - * // these options are merged with those defined when creating the signer, overriding in the case of a duplicate option - * // credentials are not specified here or when creating the signer, so default credential provider will be used - * username: 'test' // overriding username - * }); - * @example Generating an auth token asynchronously - * var signer = new AWS.RDS.Signer({ - * // configure options - * region: 'us-east-1', - * username: 'default', - * hostname: 'db.us-east-1.amazonaws.com', - * port: 8000 - * }); - * signer.getAuthToken({ - * // these options are merged with those defined when creating the signer, overriding in the case of a duplicate option - * // credentials are not specified here or when creating the signer, so default credential provider will be used - * username: 'test' // overriding username - * }, function(err, token) { - * if (err) { - * // handle error - * } else { - * // use token - * } - * }); - * - */ - getAuthToken: function getAuthToken(options, callback) { - if (typeof options === 'function' && callback === undefined) { - callback = options; - options = {}; - } - var self = this; - var hasCallback = typeof callback === 'function'; - // merge options with existing options - options = AWS.util.merge(this.options, options); - // validate options - var optionsValidation = this.validateAuthTokenOptions(options); - if (optionsValidation !== true) { - if (hasCallback) { - return callback(optionsValidation, null); - } - throw optionsValidation; - } - - // 15 minutes - var expires = 900; - // create service to generate a request from - var serviceOptions = { - region: options.region, - endpoint: new AWS.Endpoint(options.hostname + ':' + options.port), - paramValidation: false, - signatureVersion: 'v4' - }; - if (options.credentials) { - serviceOptions.credentials = options.credentials; - } - service = new AWS.Service(serviceOptions); - // ensure the SDK is using sigv4 signing (config is not enough) - service.api = api; - - var request = service.makeRequest(); - // add listeners to request to properly build auth token - this.modifyRequestForAuthToken(request, options); - - if (hasCallback) { - request.presign(expires, function(err, url) { - if (url) { - url = self.convertUrlToAuthToken(url); - } - callback(err, url); - }); - } else { - var url = request.presign(expires); - return this.convertUrlToAuthToken(url); - } - }, - - /** - * @api private - * Modifies a request to allow the presigner to generate an auth token. - */ - modifyRequestForAuthToken: function modifyRequestForAuthToken(request, options) { - request.on('build', request.buildAsGet); - var httpRequest = request.httpRequest; - httpRequest.body = AWS.util.queryParamsToString({ - Action: 'connect', - DBUser: options.username - }); - }, - - /** - * @api private - * Validates that the options passed in contain all the keys with values of the correct type that - * are needed to generate an auth token. 
- */ - validateAuthTokenOptions: function validateAuthTokenOptions(options) { - // iterate over all keys in options - var message = ''; - options = options || {}; - for (var key in requiredAuthTokenOptions) { - if (!Object.prototype.hasOwnProperty.call(requiredAuthTokenOptions, key)) { - continue; - } - if (typeof options[key] !== requiredAuthTokenOptions[key]) { - message += 'option \'' + key + '\' should have been type \'' + requiredAuthTokenOptions[key] + '\', was \'' + typeof options[key] + '\'.\n'; - } - } - if (message.length) { - return AWS.util.error(new Error(), { - code: 'InvalidParameter', - message: message - }); - } - return true; - } -}); - - -/***/ }), - -/***/ 81370: -/***/ ((module) => { - -module.exports = { - //provide realtime clock for performance measurement - now: function now() { - var second = process.hrtime(); - return second[0] * 1000 + (second[1] / 1000000); - } -}; - - -/***/ }), - -/***/ 99517: -/***/ ((module) => { - -function isFipsRegion(region) { - return typeof region === 'string' && (region.startsWith('fips-') || region.endsWith('-fips')); -} - -function isGlobalRegion(region) { - return typeof region === 'string' && ['aws-global', 'aws-us-gov-global'].includes(region); -} - -function getRealRegion(region) { - return ['fips-aws-global', 'aws-fips', 'aws-global'].includes(region) - ? 'us-east-1' - : ['fips-aws-us-gov-global', 'aws-us-gov-global'].includes(region) - ? 'us-gov-west-1' - : region.replace(/fips-(dkr-|prod-)?|-fips/, ''); -} +"use strict"; -module.exports = { - isFipsRegion: isFipsRegion, - isGlobalRegion: isGlobalRegion, - getRealRegion: getRealRegion +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; }; - - -/***/ }), - -/***/ 18262: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var util = __nccwpck_require__(77985); -var regionConfig = __nccwpck_require__(80738); - -function generateRegionPrefix(region) { - if (!region) return null; - var parts = region.split('-'); - if (parts.length < 3) return null; - return parts.slice(0, parts.length - 2).join('-') + '-*'; -} - -function derivedKeys(service) { - var region = service.config.region; - var regionPrefix = generateRegionPrefix(region); - var endpointPrefix = service.api.endpointPrefix; - - return [ - [region, endpointPrefix], - [regionPrefix, endpointPrefix], - [region, '*'], - [regionPrefix, '*'], - ['*', endpointPrefix], - [region, 'internal-*'], - ['*', '*'] - ].map(function(item) { - return item[0] && item[1] ? item.join('/') : null; - }); -} - -function applyConfig(service, config) { - util.each(config, function(key, value) { - if (key === 'globalEndpoint') return; - if (service.config[key] === undefined || service.config[key] === null) { - service.config[key] = value; - } - }); -} - -function configureEndpoint(service) { - var keys = derivedKeys(service); - var useFipsEndpoint = service.config.useFipsEndpoint; - var useDualstackEndpoint = service.config.useDualstackEndpoint; - for (var i = 0; i < keys.length; i++) { - var key = keys[i]; - if (!key) continue; - - var rules = useFipsEndpoint - ? useDualstackEndpoint - ? regionConfig.dualstackFipsRules - : regionConfig.fipsRules - : useDualstackEndpoint - ? 
regionConfig.dualstackRules - : regionConfig.rules; - - if (Object.prototype.hasOwnProperty.call(rules, key)) { - var config = rules[key]; - if (typeof config === 'string') { - config = regionConfig.patterns[config]; - } - - // set global endpoint - service.isGlobalEndpoint = !!config.globalEndpoint; - if (config.signingRegion) { - service.signingRegion = config.signingRegion; - } - - // signature version - if (!config.signatureVersion) { - // Note: config is a global object and should not be mutated here. - // However, we are retaining this line for backwards compatibility. - // The non-v4 signatureVersion will be set in a copied object below. - config.signatureVersion = 'v4'; - } - - var useBearer = (service.api && service.api.signatureVersion) === 'bearer'; - - // merge config - applyConfig(service, Object.assign( - {}, - config, - { signatureVersion: useBearer ? 'bearer' : config.signatureVersion } - )); - return; - } - } -} - -function getEndpointSuffix(region) { - var regionRegexes = { - '^(us|eu|ap|sa|ca|me)\\-\\w+\\-\\d+$': 'amazonaws.com', - '^cn\\-\\w+\\-\\d+$': 'amazonaws.com.cn', - '^us\\-gov\\-\\w+\\-\\d+$': 'amazonaws.com', - '^us\\-iso\\-\\w+\\-\\d+$': 'c2s.ic.gov', - '^us\\-isob\\-\\w+\\-\\d+$': 'sc2s.sgov.gov' - }; - var defaultSuffix = 'amazonaws.com'; - var regexes = Object.keys(regionRegexes); - for (var i = 0; i < regexes.length; i++) { - var regionPattern = RegExp(regexes[i]); - var dnsSuffix = regionRegexes[regexes[i]]; - if (regionPattern.test(region)) return dnsSuffix; - } - return defaultSuffix; +const events_1 = __nccwpck_require__(82361); +const debug_1 = __importDefault(__nccwpck_require__(38237)); +const promisify_1 = __importDefault(__nccwpck_require__(66570)); +const debug = debug_1.default('agent-base'); +function isAgent(v) { + return Boolean(v) && typeof v.addRequest === 'function'; } - -/** - * @api private - */ -module.exports = { - configureEndpoint: configureEndpoint, - getEndpointSuffix: getEndpointSuffix, -}; - - -/***/ }), - -/***/ 78652: -/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { - -var AWS = __nccwpck_require__(28437); -var AcceptorStateMachine = __nccwpck_require__(68118); -var inherit = AWS.util.inherit; -var domain = AWS.util.domain; -var jmespath = __nccwpck_require__(87783); - -/** - * @api private - */ -var hardErrorStates = {success: 1, error: 1, complete: 1}; - -function isTerminalState(machine) { - return Object.prototype.hasOwnProperty.call(hardErrorStates, machine._asm.currentState); -} - -var fsm = new AcceptorStateMachine(); -fsm.setupStates = function() { - var transition = function(_, done) { - var self = this; - self._haltHandlersOnError = false; - - self.emit(self._asm.currentState, function(err) { - if (err) { - if (isTerminalState(self)) { - if (domain && self.domain instanceof domain.Domain) { - err.domainEmitter = self; - err.domain = self.domain; - err.domainThrown = false; - self.domain.emit('error', err); - } else { - throw err; - } - } else { - self.response.error = err; - done(err); - } - } else { - done(self.response.error); - } - }); - - }; - - this.addState('validate', 'build', 'error', transition); - this.addState('build', 'afterBuild', 'restart', transition); - this.addState('afterBuild', 'sign', 'restart', transition); - this.addState('sign', 'send', 'retry', transition); - this.addState('retry', 'afterRetry', 'afterRetry', transition); - this.addState('afterRetry', 'sign', 'error', transition); - this.addState('send', 'validateResponse', 'retry', transition); - 
this.addState('validateResponse', 'extractData', 'extractError', transition); - this.addState('extractError', 'extractData', 'retry', transition); - this.addState('extractData', 'success', 'retry', transition); - this.addState('restart', 'build', 'error', transition); - this.addState('success', 'complete', 'complete', transition); - this.addState('error', 'complete', 'complete', transition); - this.addState('complete', null, null, transition); -}; -fsm.setupStates(); - -/** - * ## Asynchronous Requests - * - * All requests made through the SDK are asynchronous and use a - * callback interface. Each service method that kicks off a request - * returns an `AWS.Request` object that you can use to register - * callbacks. - * - * For example, the following service method returns the request - * object as "request", which can be used to register callbacks: - * - * ```javascript - * // request is an AWS.Request object - * var request = ec2.describeInstances(); - * - * // register callbacks on request to retrieve response data - * request.on('success', function(response) { - * console.log(response.data); - * }); - * ``` - * - * When a request is ready to be sent, the {send} method should - * be called: - * - * ```javascript - * request.send(); - * ``` - * - * Since registered callbacks may or may not be idempotent, requests should only - * be sent once. To perform the same operation multiple times, you will need to - * create multiple request objects, each with its own registered callbacks. - * - * ## Removing Default Listeners for Events - * - * Request objects are built with default listeners for the various events, - * depending on the service type. In some cases, you may want to remove - * some built-in listeners to customize behaviour. Doing this requires - * access to the built-in listener functions, which are exposed through - * the {AWS.EventListeners.Core} namespace. For instance, you may - * want to customize the HTTP handler used when sending a request. In this - * case, you can remove the built-in listener associated with the 'send' - * event, the {AWS.EventListeners.Core.SEND} listener and add your own. - * - * ## Multiple Callbacks and Chaining - * - * You can register multiple callbacks on any request object. The - * callbacks can be registered for different events, or all for the - * same event. In addition, you can chain callback registration, for - * example: - * - * ```javascript - * request. - * on('success', function(response) { - * console.log("Success!"); - * }). - * on('error', function(error, response) { - * console.log("Error!"); - * }). - * on('complete', function(response) { - * console.log("Always!"); - * }). - * send(); - * ``` - * - * The above example will print either "Success! Always!", or "Error! Always!", - * depending on whether the request succeeded or not. - * - * @!attribute httpRequest - * @readonly - * @!group HTTP Properties - * @return [AWS.HttpRequest] the raw HTTP request object - * containing request headers and body information - * sent by the service. - * - * @!attribute startTime - * @readonly - * @!group Operation Properties - * @return [Date] the time that the request started - * - * @!group Request Building Events - * - * @!event validate(request) - * Triggered when a request is being validated. Listeners - * should throw an error if the request should not be sent. 
- * @param request [Request] the request object being sent - * @see AWS.EventListeners.Core.VALIDATE_CREDENTIALS - * @see AWS.EventListeners.Core.VALIDATE_REGION - * @example Ensuring that a certain parameter is set before sending a request - * var req = s3.putObject(params); - * req.on('validate', function() { - * if (!req.params.Body.match(/^Hello\s/)) { - * throw new Error('Body must start with "Hello "'); - * } - * }); - * req.send(function(err, data) { ... }); - * - * @!event build(request) - * Triggered when the request payload is being built. Listeners - * should fill the necessary information to send the request - * over HTTP. - * @param (see AWS.Request~validate) - * @example Add a custom HTTP header to a request - * var req = s3.putObject(params); - * req.on('build', function() { - * req.httpRequest.headers['Custom-Header'] = 'value'; - * }); - * req.send(function(err, data) { ... }); - * - * @!event sign(request) - * Triggered when the request is being signed. Listeners should - * add the correct authentication headers and/or adjust the body, - * depending on the authentication mechanism being used. - * @param (see AWS.Request~validate) - * - * @!group Request Sending Events - * - * @!event send(response) - * Triggered when the request is ready to be sent. Listeners - * should call the underlying transport layer to initiate - * the sending of the request. - * @param response [Response] the response object - * @context [Request] the request object that was sent - * @see AWS.EventListeners.Core.SEND - * - * @!event retry(response) - * Triggered when a request failed and might need to be retried or redirected. - * If the response is retryable, the listener should set the - * `response.error.retryable` property to `true`, and optionally set - * `response.error.retryDelay` to the millisecond delay for the next attempt. - * In the case of a redirect, `response.error.redirect` should be set to - * `true` with `retryDelay` set to an optional delay on the next request. - * - * If a listener decides that a request should not be retried, - * it should set both `retryable` and `redirect` to false. - * - * Note that a retryable error will be retried at most - * {AWS.Config.maxRetries} times (based on the service object's config). - * Similarly, a request that is redirected will only redirect at most - * {AWS.Config.maxRedirects} times. - * - * @param (see AWS.Request~send) - * @context (see AWS.Request~send) - * @example Adding a custom retry for a 404 response - * request.on('retry', function(response) { - * // this resource is not yet available, wait 10 seconds to get it again - * if (response.httpResponse.statusCode === 404 && response.error) { - * response.error.retryable = true; // retry this error - * response.error.retryDelay = 10000; // wait 10 seconds - * } - * }); - * - * @!group Data Parsing Events - * - * @!event extractError(response) - * Triggered on all non-2xx requests so that listeners can extract - * error details from the response body. Listeners to this event - * should set the `response.error` property. - * @param (see AWS.Request~send) - * @context (see AWS.Request~send) - * - * @!event extractData(response) - * Triggered in successful requests to allow listeners to - * de-serialize the response body into `response.data`. - * @param (see AWS.Request~send) - * @context (see AWS.Request~send) - * - * @!group Completion Events - * - * @!event success(response) - * Triggered when the request completed successfully. 
- * `response.data` will contain the response data and - * `response.error` will be null. - * @param (see AWS.Request~send) - * @context (see AWS.Request~send) - * - * @!event error(error, response) - * Triggered when an error occurs at any point during the - * request. `response.error` will contain details about the error - * that occurred. `response.data` will be null. - * @param error [Error] the error object containing details about - * the error that occurred. - * @param (see AWS.Request~send) - * @context (see AWS.Request~send) - * - * @!event complete(response) - * Triggered whenever a request cycle completes. `response.error` - * should be checked, since the request may have failed. - * @param (see AWS.Request~send) - * @context (see AWS.Request~send) - * - * @!group HTTP Events - * - * @!event httpHeaders(statusCode, headers, response, statusMessage) - * Triggered when headers are sent by the remote server - * @param statusCode [Integer] the HTTP response code - * @param headers [map] the response headers - * @param (see AWS.Request~send) - * @param statusMessage [String] A status message corresponding to the HTTP - * response code - * @context (see AWS.Request~send) - * - * @!event httpData(chunk, response) - * Triggered when data is sent by the remote server - * @param chunk [Buffer] the buffer data containing the next data chunk - * from the server - * @param (see AWS.Request~send) - * @context (see AWS.Request~send) - * @see AWS.EventListeners.Core.HTTP_DATA - * - * @!event httpUploadProgress(progress, response) - * Triggered when the HTTP request has uploaded more data - * @param progress [map] An object containing the `loaded` and `total` bytes - * of the request. - * @param (see AWS.Request~send) - * @context (see AWS.Request~send) - * @note This event will not be emitted in Node.js 0.8.x. - * - * @!event httpDownloadProgress(progress, response) - * Triggered when the HTTP request has downloaded more data - * @param progress [map] An object containing the `loaded` and `total` bytes - * of the request. - * @param (see AWS.Request~send) - * @context (see AWS.Request~send) - * @note This event will not be emitted in Node.js 0.8.x. - * - * @!event httpError(error, response) - * Triggered when the HTTP request failed - * @param error [Error] the error object that was thrown - * @param (see AWS.Request~send) - * @context (see AWS.Request~send) - * - * @!event httpDone(response) - * Triggered when the server is finished sending data - * @param (see AWS.Request~send) - * @context (see AWS.Request~send) - * - * @see AWS.Response - */ -AWS.Request = inherit({ - - /** - * Creates a request for an operation on a given service with - * a set of input parameters. - * - * @param service [AWS.Service] the service to perform the operation on - * @param operation [String] the operation to perform on the service - * @param params [Object] parameters to send to the operation. - * See the operation's documentation for the format of the - * parameters. 
- */ - constructor: function Request(service, operation, params) { - var endpoint = service.endpoint; - var region = service.config.region; - var customUserAgent = service.config.customUserAgent; - - if (service.signingRegion) { - region = service.signingRegion; - } else if (service.isGlobalEndpoint) { - region = 'us-east-1'; - } - - this.domain = domain && domain.active; - this.service = service; - this.operation = operation; - this.params = params || {}; - this.httpRequest = new AWS.HttpRequest(endpoint, region); - this.httpRequest.appendToUserAgent(customUserAgent); - this.startTime = service.getSkewCorrectedDate(); - - this.response = new AWS.Response(this); - this._asm = new AcceptorStateMachine(fsm.states, 'validate'); - this._haltHandlersOnError = false; - - AWS.SequentialExecutor.call(this); - this.emit = this.emitEvent; - }, - - /** - * @!group Sending a Request - */ - - /** - * @overload send(callback = null) - * Sends the request object. - * - * @callback callback function(err, data) - * If a callback is supplied, it is called when a response is returned - * from the service. - * @context [AWS.Request] the request object being sent. - * @param err [Error] the error object returned from the request. - * Set to `null` if the request is successful. - * @param data [Object] the de-serialized data returned from - * the request. Set to `null` if a request error occurs. - * @example Sending a request with a callback - * request = s3.putObject({Bucket: 'bucket', Key: 'key'}); - * request.send(function(err, data) { console.log(err, data); }); - * @example Sending a request with no callback (using event handlers) - * request = s3.putObject({Bucket: 'bucket', Key: 'key'}); - * request.on('complete', function(response) { ... }); // register a callback - * request.send(); - */ - send: function send(callback) { - if (callback) { - // append to user agent - this.httpRequest.appendToUserAgent('callback'); - this.on('complete', function (resp) { - callback.call(resp, resp.error, resp.data); - }); - } - this.runTo(); - - return this.response; - }, - - /** - * @!method promise() - * Sends the request and returns a 'thenable' promise. - * - * Two callbacks can be provided to the `then` method on the returned promise. - * The first callback will be called if the promise is fulfilled, and the second - * callback will be called if the promise is rejected. - * @callback fulfilledCallback function(data) - * Called if the promise is fulfilled. - * @param data [Object] the de-serialized data returned from the request. - * @callback rejectedCallback function(error) - * Called if the promise is rejected. - * @param error [Error] the error object returned from the request. - * @return [Promise] A promise that represents the state of the request. - * @example Sending a request using promises. - * var request = s3.putObject({Bucket: 'bucket', Key: 'key'}); - * var result = request.promise(); - * result.then(function(data) { ... }, function(error) { ... }); - */ - - /** - * @api private - */ - build: function build(callback) { - return this.runTo('send', callback); - }, - - /** - * @api private - */ - runTo: function runTo(state, done) { - this._asm.runTo(state, done, this); - return this; - }, - - /** - * Aborts a request, emitting the error and complete events. 
- * - * @!macro nobrowser - * @example Aborting a request after sending - * var params = { - * Bucket: 'bucket', Key: 'key', - * Body: Buffer.alloc(1024 * 1024 * 5) // 5MB payload - * }; - * var request = s3.putObject(params); - * request.send(function (err, data) { - * if (err) console.log("Error:", err.code, err.message); - * else console.log(data); - * }); - * - * // abort request in 1 second - * setTimeout(request.abort.bind(request), 1000); - * - * // prints "Error: RequestAbortedError Request aborted by user" - * @return [AWS.Request] the same request object, for chaining. - * @since v1.4.0 - */ - abort: function abort() { - this.removeAllListeners('validateResponse'); - this.removeAllListeners('extractError'); - this.on('validateResponse', function addAbortedError(resp) { - resp.error = AWS.util.error(new Error('Request aborted by user'), { - code: 'RequestAbortedError', retryable: false - }); - }); - - if (this.httpRequest.stream && !this.httpRequest.stream.didCallback) { // abort HTTP stream - this.httpRequest.stream.abort(); - if (this.httpRequest._abortCallback) { - this.httpRequest._abortCallback(); - } else { - this.removeAllListeners('send'); // haven't sent yet, so let's not - } - } - - return this; - }, - - /** - * Iterates over each page of results given a pageable request, calling - * the provided callback with each page of data. After all pages have been - * retrieved, the callback is called with `null` data. - * - * @note This operation can generate multiple requests to a service. - * @example Iterating over multiple pages of objects in an S3 bucket - * var pages = 1; - * s3.listObjects().eachPage(function(err, data) { - * if (err) return; - * console.log("Page", pages++); - * console.log(data); - * }); - * @example Iterating over multiple pages with an asynchronous callback - * s3.listObjects(params).eachPage(function(err, data, done) { - * doSomethingAsyncAndOrExpensive(function() { - * // The next page of results isn't fetched until done is called - * done(); - * }); - * }); - * @callback callback function(err, data, [doneCallback]) - * Called with each page of resulting data from the request. If the - * optional `doneCallback` is provided in the function, it must be called - * when the callback is complete. - * - * @param err [Error] an error object, if an error occurred. - * @param data [Object] a single page of response data. If there is no - * more data, this object will be `null`. - * @param doneCallback [Function] an optional done callback. If this - * argument is defined in the function declaration, it should be called - * when the next page is ready to be retrieved. This is useful for - * controlling serial pagination across asynchronous operations. - * @return [Boolean] if the callback returns `false`, pagination will - * stop. - * - * @see AWS.Request.eachItem - * @see AWS.Response.nextPage - * @since v1.4.0 - */ - eachPage: function eachPage(callback) { - // Make all callbacks async-ish - callback = AWS.util.fn.makeAsync(callback, 3); - - function wrappedCallback(response) { - callback.call(response, response.error, response.data, function (result) { - if (result === false) return; - - if (response.hasNextPage()) { - response.nextPage().on('complete', wrappedCallback).send(); - } else { - callback.call(response, null, null, AWS.util.fn.noop); - } - }); - } - - this.on('complete', wrappedCallback).send(); - }, - - /** - * Enumerates over individual items of a request, paging the responses if - * necessary. 
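For the `eachItem` enumeration described above (marked experimental in this SDK), a minimal usage sketch might look like the following; it assumes an existing `AWS.S3` client named `s3`, and the bucket name is illustrative only.

```
// Hedged sketch: iterating individual objects across all pages of listObjects.
// Returning false from the callback stops the enumeration early; a null item
// signals that all pages have been consumed.
s3.listObjects({Bucket: 'bucket'}).eachItem(function (err, item) {
  if (err) { console.error(err); return false; }
  if (item === null) { console.log('no more items'); return; }
  console.log(item.Key);
});
```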
- * - * @api experimental - * @since v1.4.0 - */ - eachItem: function eachItem(callback) { - var self = this; - function wrappedCallback(err, data) { - if (err) return callback(err, null); - if (data === null) return callback(null, null); - - var config = self.service.paginationConfig(self.operation); - var resultKey = config.resultKey; - if (Array.isArray(resultKey)) resultKey = resultKey[0]; - var items = jmespath.search(data, resultKey); - var continueIteration = true; - AWS.util.arrayEach(items, function(item) { - continueIteration = callback(null, item); - if (continueIteration === false) { - return AWS.util.abort; - } - }); - return continueIteration; - } - - this.eachPage(wrappedCallback); - }, - - /** - * @return [Boolean] whether the operation can return multiple pages of - * response data. - * @see AWS.Response.eachPage - * @since v1.4.0 - */ - isPageable: function isPageable() { - return this.service.paginationConfig(this.operation) ? true : false; - }, - - /** - * Sends the request and converts the request object into a readable stream - * that can be read from or piped into a writable stream. - * - * @note The data read from a readable stream contains only - * the raw HTTP body contents. - * @example Manually reading from a stream - * request.createReadStream().on('data', function(data) { - * console.log("Got data:", data.toString()); - * }); - * @example Piping a request body into a file - * var out = fs.createWriteStream('/path/to/outfile.jpg'); - * s3.service.getObject(params).createReadStream().pipe(out); - * @return [Stream] the readable stream object that can be piped - * or read from (by registering 'data' event listeners). - * @!macro nobrowser - */ - createReadStream: function createReadStream() { - var streams = AWS.util.stream; - var req = this; - var stream = null; - - if (AWS.HttpClient.streamsApiVersion === 2) { - stream = new streams.PassThrough(); - process.nextTick(function() { req.send(); }); - } else { - stream = new streams.Stream(); - stream.readable = true; - - stream.sent = false; - stream.on('newListener', function(event) { - if (!stream.sent && event === 'data') { - stream.sent = true; - process.nextTick(function() { req.send(); }); - } - }); - } - - this.on('error', function(err) { - stream.emit('error', err); - }); - - this.on('httpHeaders', function streamHeaders(statusCode, headers, resp) { - if (statusCode < 300) { - req.removeListener('httpData', AWS.EventListeners.Core.HTTP_DATA); - req.removeListener('httpError', AWS.EventListeners.Core.HTTP_ERROR); - req.on('httpError', function streamHttpError(error) { - resp.error = error; - resp.error.retryable = false; - }); - - var shouldCheckContentLength = false; - var expectedLen; - if (req.httpRequest.method !== 'HEAD') { - expectedLen = parseInt(headers['content-length'], 10); - } - if (expectedLen !== undefined && !isNaN(expectedLen) && expectedLen >= 0) { - shouldCheckContentLength = true; - var receivedLen = 0; - } - - var checkContentLengthAndEmit = function checkContentLengthAndEmit() { - if (shouldCheckContentLength && receivedLen !== expectedLen) { - stream.emit('error', AWS.util.error( - new Error('Stream content length mismatch. 
Received ' + - receivedLen + ' of ' + expectedLen + ' bytes.'), - { code: 'StreamContentLengthMismatch' } - )); - } else if (AWS.HttpClient.streamsApiVersion === 2) { - stream.end(); - } else { - stream.emit('end'); - } - }; - - var httpStream = resp.httpResponse.createUnbufferedStream(); - - if (AWS.HttpClient.streamsApiVersion === 2) { - if (shouldCheckContentLength) { - var lengthAccumulator = new streams.PassThrough(); - lengthAccumulator._write = function(chunk) { - if (chunk && chunk.length) { - receivedLen += chunk.length; - } - return streams.PassThrough.prototype._write.apply(this, arguments); - }; - - lengthAccumulator.on('end', checkContentLengthAndEmit); - stream.on('error', function(err) { - shouldCheckContentLength = false; - httpStream.unpipe(lengthAccumulator); - lengthAccumulator.emit('end'); - lengthAccumulator.end(); - }); - httpStream.pipe(lengthAccumulator).pipe(stream, { end: false }); - } else { - httpStream.pipe(stream); - } - } else { - - if (shouldCheckContentLength) { - httpStream.on('data', function(arg) { - if (arg && arg.length) { - receivedLen += arg.length; - } - }); - } - - httpStream.on('data', function(arg) { - stream.emit('data', arg); - }); - httpStream.on('end', checkContentLengthAndEmit); - } - - httpStream.on('error', function(err) { - shouldCheckContentLength = false; - stream.emit('error', err); - }); - } - }); - - return stream; - }, - - /** - * @param [Array,Response] args This should be the response object, - * or an array of args to send to the event. - * @api private - */ - emitEvent: function emit(eventName, args, done) { - if (typeof args === 'function') { done = args; args = null; } - if (!done) done = function() { }; - if (!args) args = this.eventParameters(eventName, this.response); - - var origEmit = AWS.SequentialExecutor.prototype.emit; - origEmit.call(this, eventName, args, function (err) { - if (err) this.response.error = err; - done.call(this, err); - }); - }, - - /** - * @api private - */ - eventParameters: function eventParameters(eventName) { - switch (eventName) { - case 'restart': - case 'validate': - case 'sign': - case 'build': - case 'afterValidate': - case 'afterBuild': - return [this]; - case 'error': - return [this.response.error, this.response]; - default: - return [this.response]; - } - }, - - /** - * @api private - */ - presign: function presign(expires, callback) { - if (!callback && typeof expires === 'function') { - callback = expires; - expires = null; - } - return new AWS.Signers.Presign().sign(this.toGet(), expires, callback); - }, - - /** - * @api private - */ - isPresigned: function isPresigned() { - return Object.prototype.hasOwnProperty.call(this.httpRequest.headers, 'presigned-expires'); - }, - - /** - * @api private - */ - toUnauthenticated: function toUnauthenticated() { - this._unAuthenticated = true; - this.removeListener('validate', AWS.EventListeners.Core.VALIDATE_CREDENTIALS); - this.removeListener('sign', AWS.EventListeners.Core.SIGN); - return this; - }, - - /** - * @api private - */ - toGet: function toGet() { - if (this.service.api.protocol === 'query' || - this.service.api.protocol === 'ec2') { - this.removeListener('build', this.buildAsGet); - this.addListener('build', this.buildAsGet); - } - return this; - }, - - /** - * @api private - */ - buildAsGet: function buildAsGet(request) { - request.httpRequest.method = 'GET'; - request.httpRequest.path = request.service.endpoint.path + - '?' 
+ request.httpRequest.body; - request.httpRequest.body = ''; - - // don't need these headers on a GET request - delete request.httpRequest.headers['Content-Length']; - delete request.httpRequest.headers['Content-Type']; - }, - - /** - * @api private - */ - haltHandlersOnError: function haltHandlersOnError() { - this._haltHandlersOnError = true; - } -}); - -/** - * @api private - */ -AWS.Request.addPromisesToClass = function addPromisesToClass(PromiseDependency) { - this.prototype.promise = function promise() { - var self = this; - // append to user agent - this.httpRequest.appendToUserAgent('promise'); - return new PromiseDependency(function(resolve, reject) { - self.on('complete', function(resp) { - if (resp.error) { - reject(resp.error); - } else { - // define $response property so that it is not enumerable - // this prevents circular reference errors when stringifying the JSON object - resolve(Object.defineProperty( - resp.data || {}, - '$response', - {value: resp} - )); - } - }); - self.runTo(); - }); - }; -}; - -/** - * @api private - */ -AWS.Request.deletePromisesFromClass = function deletePromisesFromClass() { - delete this.prototype.promise; -}; - -AWS.util.addPromises(AWS.Request); - -AWS.util.mixin(AWS.Request, AWS.SequentialExecutor); - - -/***/ }), - -/***/ 39925: -/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { - -/** - * Copyright 2012-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). You - * may not use this file except in compliance with the License. A copy of - * the License is located at - * - * http://aws.amazon.com/apache2.0/ - * - * or in the "license" file accompanying this file. This file is - * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF - * ANY KIND, either express or implied. See the License for the specific - * language governing permissions and limitations under the License. - */ - -var AWS = __nccwpck_require__(28437); -var inherit = AWS.util.inherit; -var jmespath = __nccwpck_require__(87783); - -/** - * @api private - */ -function CHECK_ACCEPTORS(resp) { - var waiter = resp.request._waiter; - var acceptors = waiter.config.acceptors; - var acceptorMatched = false; - var state = 'retry'; - - acceptors.forEach(function(acceptor) { - if (!acceptorMatched) { - var matcher = waiter.matchers[acceptor.matcher]; - if (matcher && matcher(resp, acceptor.expected, acceptor.argument)) { - acceptorMatched = true; - state = acceptor.state; - } - } - }); - - if (!acceptorMatched && resp.error) state = 'failure'; - - if (state === 'success') { - waiter.setSuccess(resp); - } else { - waiter.setError(resp, state === 'retry'); - } -} - -/** - * @api private - */ -AWS.ResourceWaiter = inherit({ - /** - * Waits for a given state on a service object - * @param service [Service] the service object to wait on - * @param state [String] the state (defined in waiter configuration) to wait - * for. 
- * @example Create a waiter for running EC2 instances - * var ec2 = new AWS.EC2; - * var waiter = new AWS.ResourceWaiter(ec2, 'instanceRunning'); - */ - constructor: function constructor(service, state) { - this.service = service; - this.state = state; - this.loadWaiterConfig(this.state); - }, - - service: null, - - state: null, - - config: null, - - matchers: { - path: function(resp, expected, argument) { - try { - var result = jmespath.search(resp.data, argument); - } catch (err) { - return false; - } - - return jmespath.strictDeepEqual(result,expected); - }, - - pathAll: function(resp, expected, argument) { - try { - var results = jmespath.search(resp.data, argument); - } catch (err) { - return false; - } - - if (!Array.isArray(results)) results = [results]; - var numResults = results.length; - if (!numResults) return false; - for (var ind = 0 ; ind < numResults; ind++) { - if (!jmespath.strictDeepEqual(results[ind], expected)) { - return false; - } - } - return true; - }, - - pathAny: function(resp, expected, argument) { - try { - var results = jmespath.search(resp.data, argument); - } catch (err) { - return false; - } - - if (!Array.isArray(results)) results = [results]; - var numResults = results.length; - for (var ind = 0 ; ind < numResults; ind++) { - if (jmespath.strictDeepEqual(results[ind], expected)) { - return true; - } - } - return false; - }, - - status: function(resp, expected) { - var statusCode = resp.httpResponse.statusCode; - return (typeof statusCode === 'number') && (statusCode === expected); - }, - - error: function(resp, expected) { - if (typeof expected === 'string' && resp.error) { - return expected === resp.error.code; - } - // if expected is not string, can be boolean indicating presence of error - return expected === !!resp.error; - } - }, - - listeners: new AWS.SequentialExecutor().addNamedListeners(function(add) { - add('RETRY_CHECK', 'retry', function(resp) { - var waiter = resp.request._waiter; - if (resp.error && resp.error.code === 'ResourceNotReady') { - resp.error.retryDelay = (waiter.config.delay || 0) * 1000; - } - }); - - add('CHECK_OUTPUT', 'extractData', CHECK_ACCEPTORS); - - add('CHECK_ERROR', 'extractError', CHECK_ACCEPTORS); - }), - - /** - * @return [AWS.Request] - */ - wait: function wait(params, callback) { - if (typeof params === 'function') { - callback = params; params = undefined; - } - - if (params && params.$waiter) { - params = AWS.util.copy(params); - if (typeof params.$waiter.delay === 'number') { - this.config.delay = params.$waiter.delay; - } - if (typeof params.$waiter.maxAttempts === 'number') { - this.config.maxAttempts = params.$waiter.maxAttempts; - } - delete params.$waiter; - } - - var request = this.service.makeRequest(this.config.operation, params); - request._waiter = this; - request.response.maxRetries = this.config.maxAttempts; - request.addListeners(this.listeners); - - if (callback) request.send(callback); - return request; - }, - - setSuccess: function setSuccess(resp) { - resp.error = null; - resp.data = resp.data || {}; - resp.request.removeAllListeners('extractData'); - }, - - setError: function setError(resp, retryable) { - resp.data = null; - resp.error = AWS.util.error(resp.error || new Error(), { - code: 'ResourceNotReady', - message: 'Resource is not in the state ' + this.state, - retryable: retryable - }); - }, - - /** - * Loads waiter configuration from API configuration - * - * @api private - */ - loadWaiterConfig: function loadWaiterConfig(state) { - if (!this.service.api.waiters[state]) { - throw new 
AWS.util.error(new Error(), { - code: 'StateNotFoundError', - message: 'State ' + state + ' not found.' - }); - } - - this.config = AWS.util.copy(this.service.api.waiters[state]); - } -}); - - -/***/ }), - -/***/ 58743: -/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { - -var AWS = __nccwpck_require__(28437); -var inherit = AWS.util.inherit; -var jmespath = __nccwpck_require__(87783); - -/** - * This class encapsulates the response information - * from a service request operation sent through {AWS.Request}. - * The response object has two main properties for getting information - * back from a request: - * - * ## The `data` property - * - * The `response.data` property contains the serialized object data - * retrieved from the service request. For instance, for an - * Amazon DynamoDB `listTables` method call, the response data might - * look like: - * - * ``` - * > resp.data - * { TableNames: - * [ 'table1', 'table2', ... ] } - * ``` - * - * The `data` property can be null if an error occurs (see below). - * - * ## The `error` property - * - * In the event of a service error (or transfer error), the - * `response.error` property will be filled with the given - * error data in the form: - * - * ``` - * { code: 'SHORT_UNIQUE_ERROR_CODE', - * message: 'Some human readable error message' } - * ``` - * - * In the case of an error, the `data` property will be `null`. - * Note that if you handle events that can be in a failure state, - * you should always check whether `response.error` is set - * before attempting to access the `response.data` property. - * - * @!attribute data - * @readonly - * @!group Data Properties - * @note Inside of a {AWS.Request~httpData} event, this - * property contains a single raw packet instead of the - * full de-serialized service response. - * @return [Object] the de-serialized response data - * from the service. - * - * @!attribute error - * An structure containing information about a service - * or networking error. - * @readonly - * @!group Data Properties - * @note This attribute is only filled if a service or - * networking error occurs. - * @return [Error] - * * code [String] a unique short code representing the - * error that was emitted. - * * message [String] a longer human readable error message - * * retryable [Boolean] whether the error message is - * retryable. - * * statusCode [Numeric] in the case of a request that reached the service, - * this value contains the response status code. - * * time [Date] the date time object when the error occurred. - * * hostname [String] set when a networking error occurs to easily - * identify the endpoint of the request. - * * region [String] set when a networking error occurs to easily - * identify the region of the request. - * - * @!attribute requestId - * @readonly - * @!group Data Properties - * @return [String] the unique request ID associated with the response. - * Log this value when debugging requests for AWS support. - * - * @!attribute retryCount - * @readonly - * @!group Operation Properties - * @return [Integer] the number of retries that were - * attempted before the request was completed. - * - * @!attribute redirectCount - * @readonly - * @!group Operation Properties - * @return [Integer] the number of redirects that were - * followed before the request was completed. 
- * - * @!attribute httpResponse - * @readonly - * @!group HTTP Properties - * @return [AWS.HttpResponse] the raw HTTP response object - * containing the response headers and body information - * from the server. - * - * @see AWS.Request - */ -AWS.Response = inherit({ - - /** - * @api private - */ - constructor: function Response(request) { - this.request = request; - this.data = null; - this.error = null; - this.retryCount = 0; - this.redirectCount = 0; - this.httpResponse = new AWS.HttpResponse(); - if (request) { - this.maxRetries = request.service.numRetries(); - this.maxRedirects = request.service.config.maxRedirects; - } - }, - - /** - * Creates a new request for the next page of response data, calling the - * callback with the page data if a callback is provided. - * - * @callback callback function(err, data) - * Called when a page of data is returned from the next request. - * - * @param err [Error] an error object, if an error occurred in the request - * @param data [Object] the next page of data, or null, if there are no - * more pages left. - * @return [AWS.Request] the request object for the next page of data - * @return [null] if no callback is provided and there are no pages left - * to retrieve. - * @since v1.4.0 - */ - nextPage: function nextPage(callback) { - var config; - var service = this.request.service; - var operation = this.request.operation; - try { - config = service.paginationConfig(operation, true); - } catch (e) { this.error = e; } - - if (!this.hasNextPage()) { - if (callback) callback(this.error, null); - else if (this.error) throw this.error; - return null; - } - - var params = AWS.util.copy(this.request.params); - if (!this.nextPageTokens) { - return callback ? callback(null, null) : null; - } else { - var inputTokens = config.inputToken; - if (typeof inputTokens === 'string') inputTokens = [inputTokens]; - for (var i = 0; i < inputTokens.length; i++) { - params[inputTokens[i]] = this.nextPageTokens[i]; - } - return service.makeRequest(this.request.operation, params, callback); - } - }, - - /** - * @return [Boolean] whether more pages of data can be returned by further - * requests - * @since v1.4.0 - */ - hasNextPage: function hasNextPage() { - this.cacheNextPageTokens(); - if (this.nextPageTokens) return true; - if (this.nextPageTokens === undefined) return undefined; - else return false; - }, - - /** - * @api private - */ - cacheNextPageTokens: function cacheNextPageTokens() { - if (Object.prototype.hasOwnProperty.call(this, 'nextPageTokens')) return this.nextPageTokens; - this.nextPageTokens = undefined; - - var config = this.request.service.paginationConfig(this.request.operation); - if (!config) return this.nextPageTokens; - - this.nextPageTokens = null; - if (config.moreResults) { - if (!jmespath.search(this.data, config.moreResults)) { - return this.nextPageTokens; - } - } - - var exprs = config.outputToken; - if (typeof exprs === 'string') exprs = [exprs]; - AWS.util.arrayEach.call(this, exprs, function (expr) { - var output = jmespath.search(this.data, expr); - if (output) { - this.nextPageTokens = this.nextPageTokens || []; - this.nextPageTokens.push(output); - } - }); - - return this.nextPageTokens; - } - -}); - - -/***/ }), - -/***/ 81600: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var AWS = __nccwpck_require__(28437); -var byteLength = AWS.util.string.byteLength; -var Buffer = AWS.util.Buffer; - -/** - * The managed uploader allows for easy and efficient uploading of buffers, - * blobs, or streams, using a 
configurable amount of concurrency to perform - * multipart uploads where possible. This abstraction also enables uploading - * streams of unknown size due to the use of multipart uploads. - * - * To construct a managed upload object, see the {constructor} function. - * - * ## Tracking upload progress - * - * The managed upload object can also track progress by attaching an - * 'httpUploadProgress' listener to the upload manager. This event is similar - * to {AWS.Request~httpUploadProgress} but groups all concurrent upload progress - * into a single event. See {AWS.S3.ManagedUpload~httpUploadProgress} for more - * information. - * - * ## Handling Multipart Cleanup - * - * By default, this class will automatically clean up any multipart uploads - * when an individual part upload fails. This behavior can be disabled in order - * to manually handle failures by setting the `leavePartsOnError` configuration - * option to `true` when initializing the upload object. - * - * @!event httpUploadProgress(progress) - * Triggered when the uploader has uploaded more data. - * @note The `total` property may not be set if the stream being uploaded has - * not yet finished chunking. In this case the `total` will be undefined - * until the total stream size is known. - * @note This event will not be emitted in Node.js 0.8.x. - * @param progress [map] An object containing the `loaded` and `total` bytes - * of the request and the `key` of the S3 object. Note that `total` may be undefined until the payload - * size is known. - * @context (see AWS.Request~send) - */ -AWS.S3.ManagedUpload = AWS.util.inherit({ - /** - * Creates a managed upload object with a set of configuration options. - * - * @note A "Body" parameter is required to be set prior to calling {send}. - * @note In Node.js, sending "Body" as {https://nodejs.org/dist/latest/docs/api/stream.html#stream_object_mode object-mode stream} - * may result in upload hangs. Using a buffer stream is preferable. - * @option options params [map] a map of parameters to pass to the upload - * requests. The "Body" parameter is required to be specified either on - * the service or in the params option. - * @note ContentMD5 should not be provided when using the managed upload object. - * Instead, setting "computeChecksums" to true will enable automatic ContentMD5 generation - * by the managed upload object. - * @option options queueSize [Number] (4) the size of the concurrent queue - * manager to upload parts in parallel. Set to 1 for synchronous uploading - * of parts. Note that the uploader will buffer at most queueSize * partSize - * bytes into memory at any given time. - * @option options partSize [Number] (5mb) the size in bytes for each - * individual part to be uploaded. Adjust the part size to ensure the number - * of parts does not exceed {maxTotalParts}. See {minPartSize} for the - * minimum allowed part size. - * @option options leavePartsOnError [Boolean] (false) whether to abort the - * multipart upload if an error occurs. Set to true if you want to handle - * failures manually. - * @option options service [AWS.S3] an optional S3 service object to use for - * requests. This object might have bound parameters used by the uploader. - * @option options tags [Array] The tags to apply to the uploaded object. - * Each tag should have `Key` and `Value` keys. 
- * @example Creating a default uploader for a stream object - * var upload = new AWS.S3.ManagedUpload({ - * params: {Bucket: 'bucket', Key: 'key', Body: stream} - * }); - * @example Creating an uploader with concurrency of 1 and partSize of 10mb - * var upload = new AWS.S3.ManagedUpload({ - * partSize: 10 * 1024 * 1024, queueSize: 1, - * params: {Bucket: 'bucket', Key: 'key', Body: stream} - * }); - * @example Creating an uploader with tags - * var upload = new AWS.S3.ManagedUpload({ - * params: {Bucket: 'bucket', Key: 'key', Body: stream}, - * tags: [{Key: 'tag1', Value: 'value1'}, {Key: 'tag2', Value: 'value2'}] - * }); - * @see send - */ - constructor: function ManagedUpload(options) { - var self = this; - AWS.SequentialExecutor.call(self); - self.body = null; - self.sliceFn = null; - self.callback = null; - self.parts = {}; - self.completeInfo = []; - self.fillQueue = function() { - self.callback(new Error('Unsupported body payload ' + typeof self.body)); - }; - - self.configure(options); - }, - - /** - * @api private - */ - configure: function configure(options) { - options = options || {}; - this.partSize = this.minPartSize; - - if (options.queueSize) this.queueSize = options.queueSize; - if (options.partSize) this.partSize = options.partSize; - if (options.leavePartsOnError) this.leavePartsOnError = true; - if (options.tags) { - if (!Array.isArray(options.tags)) { - throw new Error('Tags must be specified as an array; ' + - typeof options.tags + ' provided.'); - } - this.tags = options.tags; - } - - if (this.partSize < this.minPartSize) { - throw new Error('partSize must be greater than ' + - this.minPartSize); - } - - this.service = options.service; - this.bindServiceObject(options.params); - this.validateBody(); - this.adjustTotalBytes(); - }, - - /** - * @api private - */ - leavePartsOnError: false, - - /** - * @api private - */ - queueSize: 4, - - /** - * @api private - */ - partSize: null, - - /** - * @readonly - * @return [Number] the minimum number of bytes for an individual part - * upload. - */ - minPartSize: 1024 * 1024 * 5, - - /** - * @readonly - * @return [Number] the maximum allowed number of parts in a multipart upload. - */ - maxTotalParts: 10000, - - /** - * Initiates the managed upload for the payload. - * - * @callback callback function(err, data) - * @param err [Error] an error or null if no error occurred. - * @param data [map] The response data from the successful upload: - * * `Location` (String) the URL of the uploaded object - * * `ETag` (String) the ETag of the uploaded object - * * `Bucket` (String) the bucket to which the object was uploaded - * * `Key` (String) the key to which the object was uploaded - * @example Sending a managed upload object - * var params = {Bucket: 'bucket', Key: 'key', Body: stream}; - * var upload = new AWS.S3.ManagedUpload({params: params}); - * upload.send(function(err, data) { - * console.log(err, data); - * }); - */ - send: function(callback) { - var self = this; - self.failed = false; - self.callback = callback || function(err) { if (err) throw err; }; - - var runFill = true; - if (self.sliceFn) { - self.fillQueue = self.fillBuffer; - } else if (AWS.util.isNode()) { - var Stream = AWS.util.stream.Stream; - if (self.body instanceof Stream) { - runFill = false; - self.fillQueue = self.fillStream; - self.partBuffers = []; - self.body. - on('error', function(err) { self.cleanup(err); }). - on('readable', function() { self.fillQueue(); }). 
- on('end', function() { - self.isDoneChunking = true; - self.numParts = self.totalPartNumbers; - self.fillQueue.call(self); - - if (self.isDoneChunking && self.totalPartNumbers >= 1 && self.doneParts === self.numParts) { - self.finishMultiPart(); - } - }); - } - } - - if (runFill) self.fillQueue.call(self); - }, - - /** - * @!method promise() - * Returns a 'thenable' promise. - * - * Two callbacks can be provided to the `then` method on the returned promise. - * The first callback will be called if the promise is fulfilled, and the second - * callback will be called if the promise is rejected. - * @callback fulfilledCallback function(data) - * Called if the promise is fulfilled. - * @param data [map] The response data from the successful upload: - * `Location` (String) the URL of the uploaded object - * `ETag` (String) the ETag of the uploaded object - * `Bucket` (String) the bucket to which the object was uploaded - * `Key` (String) the key to which the object was uploaded - * @callback rejectedCallback function(err) - * Called if the promise is rejected. - * @param err [Error] an error or null if no error occurred. - * @return [Promise] A promise that represents the state of the upload request. - * @example Sending an upload request using promises. - * var upload = s3.upload({Bucket: 'bucket', Key: 'key', Body: stream}); - * var promise = upload.promise(); - * promise.then(function(data) { ... }, function(err) { ... }); - */ - - /** - * Aborts a managed upload, including all concurrent upload requests. - * @note By default, calling this function will cleanup a multipart upload - * if one was created. To leave the multipart upload around after aborting - * a request, configure `leavePartsOnError` to `true` in the {constructor}. - * @note Calling {abort} in the browser environment will not abort any requests - * that are already in flight. If a multipart upload was created, any parts - * not yet uploaded will not be sent, and the multipart upload will be cleaned up. - * @example Aborting an upload - * var params = { - * Bucket: 'bucket', Key: 'key', - * Body: Buffer.alloc(1024 * 1024 * 25) // 25MB payload - * }; - * var upload = s3.upload(params); - * upload.send(function (err, data) { - * if (err) console.log("Error:", err.code, err.message); - * else console.log(data); - * }); - * - * // abort request in 1 second - * setTimeout(upload.abort.bind(upload), 1000); - */ - abort: function() { - var self = this; - //abort putObject request - if (self.isDoneChunking === true && self.totalPartNumbers === 1 && self.singlePart) { - self.singlePart.abort(); - } else { - self.cleanup(AWS.util.error(new Error('Request aborted by user'), { - code: 'RequestAbortedError', retryable: false - })); - } - }, - - /** - * @api private - */ - validateBody: function validateBody() { - var self = this; - self.body = self.service.config.params.Body; - if (typeof self.body === 'string') { - self.body = AWS.util.buffer.toBuffer(self.body); - } else if (!self.body) { - throw new Error('params.Body is required'); - } - self.sliceFn = AWS.util.arraySliceFn(self.body); - }, - - /** - * @api private - */ - bindServiceObject: function bindServiceObject(params) { - params = params || {}; - var self = this; - // bind parameters to new service object - if (!self.service) { - self.service = new AWS.S3({params: params}); - } else { - // Create a new S3 client from the supplied client's constructor. 
- var service = self.service; - var config = AWS.util.copy(service.config); - config.signatureVersion = service.getSignatureVersion(); - self.service = new service.constructor.__super__(config); - self.service.config.params = - AWS.util.merge(self.service.config.params || {}, params); - Object.defineProperty(self.service, '_originalConfig', { - get: function() { return service._originalConfig; }, - enumerable: false, - configurable: true - }); - } - }, - - /** - * @api private - */ - adjustTotalBytes: function adjustTotalBytes() { - var self = this; - try { // try to get totalBytes - self.totalBytes = byteLength(self.body); - } catch (e) { } - - // try to adjust partSize if we know payload length - if (self.totalBytes) { - var newPartSize = Math.ceil(self.totalBytes / self.maxTotalParts); - if (newPartSize > self.partSize) self.partSize = newPartSize; - } else { - self.totalBytes = undefined; - } - }, - - /** - * @api private - */ - isDoneChunking: false, - - /** - * @api private - */ - partPos: 0, - - /** - * @api private - */ - totalChunkedBytes: 0, - - /** - * @api private - */ - totalUploadedBytes: 0, - - /** - * @api private - */ - totalBytes: undefined, - - /** - * @api private - */ - numParts: 0, - - /** - * @api private - */ - totalPartNumbers: 0, - - /** - * @api private - */ - activeParts: 0, - - /** - * @api private - */ - doneParts: 0, - - /** - * @api private - */ - parts: null, - - /** - * @api private - */ - completeInfo: null, - - /** - * @api private - */ - failed: false, - - /** - * @api private - */ - multipartReq: null, - - /** - * @api private - */ - partBuffers: null, - - /** - * @api private - */ - partBufferLength: 0, - - /** - * @api private - */ - fillBuffer: function fillBuffer() { - var self = this; - var bodyLen = byteLength(self.body); - - if (bodyLen === 0) { - self.isDoneChunking = true; - self.numParts = 1; - self.nextChunk(self.body); - return; - } - - while (self.activeParts < self.queueSize && self.partPos < bodyLen) { - var endPos = Math.min(self.partPos + self.partSize, bodyLen); - var buf = self.sliceFn.call(self.body, self.partPos, endPos); - self.partPos += self.partSize; - - if (byteLength(buf) < self.partSize || self.partPos === bodyLen) { - self.isDoneChunking = true; - self.numParts = self.totalPartNumbers + 1; - } - self.nextChunk(buf); - } - }, - - /** - * @api private - */ - fillStream: function fillStream() { - var self = this; - if (self.activeParts >= self.queueSize) return; - - var buf = self.body.read(self.partSize - self.partBufferLength) || - self.body.read(); - if (buf) { - self.partBuffers.push(buf); - self.partBufferLength += buf.length; - self.totalChunkedBytes += buf.length; - } - - if (self.partBufferLength >= self.partSize) { - // if we have single buffer we avoid copyfull concat - var pbuf = self.partBuffers.length === 1 ? - self.partBuffers[0] : Buffer.concat(self.partBuffers); - self.partBuffers = []; - self.partBufferLength = 0; - - // if we have more than partSize, push the rest back on the queue - if (pbuf.length > self.partSize) { - var rest = pbuf.slice(self.partSize); - self.partBuffers.push(rest); - self.partBufferLength += rest.length; - pbuf = pbuf.slice(0, self.partSize); - } - - self.nextChunk(pbuf); - } - - if (self.isDoneChunking && !self.isDoneSending) { - // if we have single buffer we avoid copyfull concat - pbuf = self.partBuffers.length === 1 ? 
- self.partBuffers[0] : Buffer.concat(self.partBuffers); - self.partBuffers = []; - self.partBufferLength = 0; - self.totalBytes = self.totalChunkedBytes; - self.isDoneSending = true; - - if (self.numParts === 0 || pbuf.length > 0) { - self.numParts++; - self.nextChunk(pbuf); - } - } - - self.body.read(0); - }, - - /** - * @api private - */ - nextChunk: function nextChunk(chunk) { - var self = this; - if (self.failed) return null; - - var partNumber = ++self.totalPartNumbers; - if (self.isDoneChunking && partNumber === 1) { - var params = {Body: chunk}; - if (this.tags) { - params.Tagging = this.getTaggingHeader(); - } - var req = self.service.putObject(params); - req._managedUpload = self; - req.on('httpUploadProgress', self.progress).send(self.finishSinglePart); - self.singlePart = req; //save the single part request - return null; - } else if (self.service.config.params.ContentMD5) { - var err = AWS.util.error(new Error('The Content-MD5 you specified is invalid for multi-part uploads.'), { - code: 'InvalidDigest', retryable: false - }); - - self.cleanup(err); - return null; - } - - if (self.completeInfo[partNumber] && self.completeInfo[partNumber].ETag !== null) { - return null; // Already uploaded this part. - } - - self.activeParts++; - if (!self.service.config.params.UploadId) { - - if (!self.multipartReq) { // create multipart - self.multipartReq = self.service.createMultipartUpload(); - self.multipartReq.on('success', function(resp) { - self.service.config.params.UploadId = resp.data.UploadId; - self.multipartReq = null; - }); - self.queueChunks(chunk, partNumber); - self.multipartReq.on('error', function(err) { - self.cleanup(err); - }); - self.multipartReq.send(); - } else { - self.queueChunks(chunk, partNumber); - } - } else { // multipart is created, just send - self.uploadPart(chunk, partNumber); - } - }, - - /** - * @api private - */ - getTaggingHeader: function getTaggingHeader() { - var kvPairStrings = []; - for (var i = 0; i < this.tags.length; i++) { - kvPairStrings.push(AWS.util.uriEscape(this.tags[i].Key) + '=' + - AWS.util.uriEscape(this.tags[i].Value)); - } - - return kvPairStrings.join('&'); - }, - - /** - * @api private - */ - uploadPart: function uploadPart(chunk, partNumber) { - var self = this; - - var partParams = { - Body: chunk, - ContentLength: AWS.util.string.byteLength(chunk), - PartNumber: partNumber - }; - - var partInfo = {ETag: null, PartNumber: partNumber}; - self.completeInfo[partNumber] = partInfo; - - var req = self.service.uploadPart(partParams); - self.parts[partNumber] = req; - req._lastUploadedBytes = 0; - req._managedUpload = self; - req.on('httpUploadProgress', self.progress); - req.send(function(err, data) { - delete self.parts[partParams.PartNumber]; - self.activeParts--; - - if (!err && (!data || !data.ETag)) { - var message = 'No access to ETag property on response.'; - if (AWS.util.isBrowser()) { - message += ' Check CORS configuration to expose ETag header.'; - } - - err = AWS.util.error(new Error(message), { - code: 'ETagMissing', retryable: false - }); - } - if (err) return self.cleanup(err); - //prevent sending part being returned twice (https://github.com/aws/aws-sdk-js/issues/2304) - if (self.completeInfo[partNumber] && self.completeInfo[partNumber].ETag !== null) return null; - partInfo.ETag = data.ETag; - self.doneParts++; - if (self.isDoneChunking && self.doneParts === self.totalPartNumbers) { - self.finishMultiPart(); - } else { - self.fillQueue.call(self); - } - }); - }, - - /** - * @api private - */ - queueChunks: function 
queueChunks(chunk, partNumber) { - var self = this; - self.multipartReq.on('success', function() { - self.uploadPart(chunk, partNumber); - }); - }, - - /** - * @api private - */ - cleanup: function cleanup(err) { - var self = this; - if (self.failed) return; - - // clean up stream - if (typeof self.body.removeAllListeners === 'function' && - typeof self.body.resume === 'function') { - self.body.removeAllListeners('readable'); - self.body.removeAllListeners('end'); - self.body.resume(); - } - - // cleanup multipartReq listeners - if (self.multipartReq) { - self.multipartReq.removeAllListeners('success'); - self.multipartReq.removeAllListeners('error'); - self.multipartReq.removeAllListeners('complete'); - delete self.multipartReq; - } - - if (self.service.config.params.UploadId && !self.leavePartsOnError) { - self.service.abortMultipartUpload().send(); - } else if (self.leavePartsOnError) { - self.isDoneChunking = false; - } - - AWS.util.each(self.parts, function(partNumber, part) { - part.removeAllListeners('complete'); - part.abort(); - }); - - self.activeParts = 0; - self.partPos = 0; - self.numParts = 0; - self.totalPartNumbers = 0; - self.parts = {}; - self.failed = true; - self.callback(err); - }, - - /** - * @api private - */ - finishMultiPart: function finishMultiPart() { - var self = this; - var completeParams = { MultipartUpload: { Parts: self.completeInfo.slice(1) } }; - self.service.completeMultipartUpload(completeParams, function(err, data) { - if (err) { - return self.cleanup(err); - } - - if (data && typeof data.Location === 'string') { - data.Location = data.Location.replace(/%2F/g, '/'); - } - - if (Array.isArray(self.tags)) { - for (var i = 0; i < self.tags.length; i++) { - self.tags[i].Value = String(self.tags[i].Value); - } - self.service.putObjectTagging( - {Tagging: {TagSet: self.tags}}, - function(e, d) { - if (e) { - self.callback(e); - } else { - self.callback(e, data); - } - } - ); - } else { - self.callback(err, data); - } - }); - }, - - /** - * @api private - */ - finishSinglePart: function finishSinglePart(err, data) { - var upload = this.request._managedUpload; - var httpReq = this.request.httpRequest; - var endpoint = httpReq.endpoint; - if (err) return upload.callback(err); - data.Location = - [endpoint.protocol, '//', endpoint.host, httpReq.path].join(''); - data.key = this.request.params.Key; // will stay undocumented - data.Key = this.request.params.Key; - data.Bucket = this.request.params.Bucket; - upload.callback(err, data); - }, - - /** - * @api private - */ - progress: function progress(info) { - var upload = this._managedUpload; - if (this.operation === 'putObject') { - info.part = 1; - info.key = this.params.Key; - } else { - upload.totalUploadedBytes += info.loaded - this._lastUploadedBytes; - this._lastUploadedBytes = info.loaded; - info = { - loaded: upload.totalUploadedBytes, - total: upload.totalBytes, - part: this.params.PartNumber, - key: this.params.Key - }; - } - upload.emit('httpUploadProgress', [info]); - } -}); - -AWS.util.mixin(AWS.S3.ManagedUpload, AWS.SequentialExecutor); - -/** - * @api private - */ -AWS.S3.ManagedUpload.addPromisesToClass = function addPromisesToClass(PromiseDependency) { - this.prototype.promise = AWS.util.promisifyMethod('send', PromiseDependency); -}; - -/** - * @api private - */ -AWS.S3.ManagedUpload.deletePromisesFromClass = function deletePromisesFromClass() { - delete this.prototype.promise; -}; - -AWS.util.addPromises(AWS.S3.ManagedUpload); - -/** - * @api private - */ -module.exports = AWS.S3.ManagedUpload; 
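To tie together the managed upload machinery above, here is a hedged usage sketch. It assumes an existing `AWS.S3` client named `s3` and a readable stream `stream`, and uses the `upload()` convenience method, which constructs a ManagedUpload internally; bucket and key names are illustrative only.

```
// Hedged sketch: multipart upload of a stream with custom part size and
// concurrency, consumed as a promise. `s3` and `stream` are assumed to exist.
var upload = s3.upload(
  {Bucket: 'bucket', Key: 'key', Body: stream},
  {partSize: 10 * 1024 * 1024, queueSize: 2} // 10 MB parts, 2 parts in flight
);
upload.on('httpUploadProgress', function (progress) {
  // `total` may stay undefined until the stream has been fully chunked
  console.log('uploaded ' + progress.loaded + ' of ' + (progress.total || '?') + ' bytes');
});
upload.promise()
  .then(function (data) { console.log('uploaded to ' + data.Location); })
  .catch(function (err) { console.error('upload failed', err); });
```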
- - -/***/ }), - -/***/ 55948: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var AWS = __nccwpck_require__(28437); - -/** - * @api private - * @!method on(eventName, callback) - * Registers an event listener callback for the event given by `eventName`. - * Parameters passed to the callback function depend on the individual event - * being triggered. See the event documentation for those parameters. - * - * @param eventName [String] the event name to register the listener for - * @param callback [Function] the listener callback function - * @param toHead [Boolean] attach the listener callback to the head of the callback array if set to true. - * Defaults to false. - * @return [AWS.SequentialExecutor] the same object for chaining - */ -AWS.SequentialExecutor = AWS.util.inherit({ - - constructor: function SequentialExecutor() { - this._events = {}; - }, - - /** - * @api private - */ - listeners: function listeners(eventName) { - return this._events[eventName] ? this._events[eventName].slice(0) : []; - }, - - on: function on(eventName, listener, toHead) { - if (this._events[eventName]) { - toHead ? - this._events[eventName].unshift(listener) : - this._events[eventName].push(listener); - } else { - this._events[eventName] = [listener]; - } - return this; - }, - - onAsync: function onAsync(eventName, listener, toHead) { - listener._isAsync = true; - return this.on(eventName, listener, toHead); - }, - - removeListener: function removeListener(eventName, listener) { - var listeners = this._events[eventName]; - if (listeners) { - var length = listeners.length; - var position = -1; - for (var i = 0; i < length; ++i) { - if (listeners[i] === listener) { - position = i; - } - } - if (position > -1) { - listeners.splice(position, 1); - } - } - return this; - }, - - removeAllListeners: function removeAllListeners(eventName) { - if (eventName) { - delete this._events[eventName]; - } else { - this._events = {}; - } - return this; - }, - - /** - * @api private - */ - emit: function emit(eventName, eventArgs, doneCallback) { - if (!doneCallback) doneCallback = function() { }; - var listeners = this.listeners(eventName); - var count = listeners.length; - this.callListeners(listeners, eventArgs, doneCallback); - return count > 0; - }, - - /** - * @api private - */ - callListeners: function callListeners(listeners, args, doneCallback, prevError) { - var self = this; - var error = prevError || null; - - function callNextListener(err) { - if (err) { - error = AWS.util.error(error || new Error(), err); - if (self._haltHandlersOnError) { - return doneCallback.call(self, error); - } - } - self.callListeners(listeners, args, doneCallback, error); - } - - while (listeners.length > 0) { - var listener = listeners.shift(); - if (listener._isAsync) { // asynchronous listener - listener.apply(self, args.concat([callNextListener])); - return; // stop here, callNextListener will continue - } else { // synchronous listener - try { - listener.apply(self, args); - } catch (err) { - error = AWS.util.error(error || new Error(), err); - } - if (error && self._haltHandlersOnError) { - doneCallback.call(self, error); - return; - } - } - } - doneCallback.call(self, error); - }, - - /** - * Adds or copies a set of listeners from another list of - * listeners or SequentialExecutor object. - * - * @param listeners [map<String,Array<Function>>, AWS.SequentialExecutor] - * a list of events and callbacks, or an event emitter object - * containing listeners to add to this emitter object. 
- * @return [AWS.SequentialExecutor] the emitter object, for chaining. - * @example Adding listeners from a map of listeners - * emitter.addListeners({ - * event1: [function() { ... }, function() { ... }], - * event2: [function() { ... }] - * }); - * emitter.emit('event1'); // emitter has event1 - * emitter.emit('event2'); // emitter has event2 - * @example Adding listeners from another emitter object - * var emitter1 = new AWS.SequentialExecutor(); - * emitter1.on('event1', function() { ... }); - * emitter1.on('event2', function() { ... }); - * var emitter2 = new AWS.SequentialExecutor(); - * emitter2.addListeners(emitter1); - * emitter2.emit('event1'); // emitter2 has event1 - * emitter2.emit('event2'); // emitter2 has event2 - */ - addListeners: function addListeners(listeners) { - var self = this; - - // extract listeners if parameter is an SequentialExecutor object - if (listeners._events) listeners = listeners._events; - - AWS.util.each(listeners, function(event, callbacks) { - if (typeof callbacks === 'function') callbacks = [callbacks]; - AWS.util.arrayEach(callbacks, function(callback) { - self.on(event, callback); - }); - }); - - return self; - }, - - /** - * Registers an event with {on} and saves the callback handle function - * as a property on the emitter object using a given `name`. - * - * @param name [String] the property name to set on this object containing - * the callback function handle so that the listener can be removed in - * the future. - * @param (see on) - * @return (see on) - * @example Adding a named listener DATA_CALLBACK - * var listener = function() { doSomething(); }; - * emitter.addNamedListener('DATA_CALLBACK', 'data', listener); - * - * // the following prints: true - * console.log(emitter.DATA_CALLBACK == listener); - */ - addNamedListener: function addNamedListener(name, eventName, callback, toHead) { - this[name] = callback; - this.addListener(eventName, callback, toHead); - return this; - }, - - /** - * @api private - */ - addNamedAsyncListener: function addNamedAsyncListener(name, eventName, callback, toHead) { - callback._isAsync = true; - return this.addNamedListener(name, eventName, callback, toHead); - }, - - /** - * Helper method to add a set of named listeners using - * {addNamedListener}. The callback contains a parameter - * with a handle to the `addNamedListener` method. - * - * @callback callback function(add) - * The callback function is called immediately in order to provide - * the `add` function to the block. This simplifies the addition of - * a large group of named listeners. - * @param add [Function] the {addNamedListener} function to call - * when registering listeners. - * @example Adding a set of named listeners - * emitter.addNamedListeners(function(add) { - * add('DATA_CALLBACK', 'data', function() { ... }); - * add('OTHER', 'otherEvent', function() { ... }); - * add('LAST', 'lastEvent', function() { ... }); - * }); - * - * // these properties are now set: - * emitter.DATA_CALLBACK; - * emitter.OTHER; - * emitter.LAST; - */ - addNamedListeners: function addNamedListeners(callback) { - var self = this; - callback( - function() { - self.addNamedListener.apply(self, arguments); - }, - function() { - self.addNamedAsyncListener.apply(self, arguments); - } - ); - return this; - } -}); - -/** - * {on} is the prefered method. 
- * @api private - */ -AWS.SequentialExecutor.prototype.addListener = AWS.SequentialExecutor.prototype.on; - -/** - * @api private - */ -module.exports = AWS.SequentialExecutor; - - -/***/ }), - -/***/ 68903: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var AWS = __nccwpck_require__(28437); -var Api = __nccwpck_require__(17657); -var regionConfig = __nccwpck_require__(18262); - -var inherit = AWS.util.inherit; -var clientCount = 0; -var region_utils = __nccwpck_require__(99517); - -/** - * The service class representing an AWS service. - * - * @class_abstract This class is an abstract class. - * - * @!attribute apiVersions - * @return [Array] the list of API versions supported by this service. - * @readonly - */ -AWS.Service = inherit({ - /** - * Create a new service object with a configuration object - * - * @param config [map] a map of configuration options - */ - constructor: function Service(config) { - if (!this.loadServiceClass) { - throw AWS.util.error(new Error(), - 'Service must be constructed with `new\' operator'); - } - - if (config) { - if (config.region) { - var region = config.region; - if (region_utils.isFipsRegion(region)) { - config.region = region_utils.getRealRegion(region); - config.useFipsEndpoint = true; - } - if (region_utils.isGlobalRegion(region)) { - config.region = region_utils.getRealRegion(region); - } - } - if (typeof config.useDualstack === 'boolean' - && typeof config.useDualstackEndpoint !== 'boolean') { - config.useDualstackEndpoint = config.useDualstack; - } - } - - var ServiceClass = this.loadServiceClass(config || {}); - if (ServiceClass) { - var originalConfig = AWS.util.copy(config); - var svc = new ServiceClass(config); - Object.defineProperty(svc, '_originalConfig', { - get: function() { return originalConfig; }, - enumerable: false, - configurable: true - }); - svc._clientId = ++clientCount; - return svc; - } - this.initialize(config); - }, - - /** - * @api private - */ - initialize: function initialize(config) { - var svcConfig = AWS.config[this.serviceIdentifier]; - this.config = new AWS.Config(AWS.config); - if (svcConfig) this.config.update(svcConfig, true); - if (config) this.config.update(config, true); - - this.validateService(); - if (!this.config.endpoint) regionConfig.configureEndpoint(this); - - this.config.endpoint = this.endpointFromTemplate(this.config.endpoint); - this.setEndpoint(this.config.endpoint); - //enable attaching listeners to service client - AWS.SequentialExecutor.call(this); - AWS.Service.addDefaultMonitoringListeners(this); - if ((this.config.clientSideMonitoring || AWS.Service._clientSideMonitoring) && this.publisher) { - var publisher = this.publisher; - this.addNamedListener('PUBLISH_API_CALL', 'apiCall', function PUBLISH_API_CALL(event) { - process.nextTick(function() {publisher.eventHandler(event);}); - }); - this.addNamedListener('PUBLISH_API_ATTEMPT', 'apiCallAttempt', function PUBLISH_API_ATTEMPT(event) { - process.nextTick(function() {publisher.eventHandler(event);}); - }); - } - }, - - /** - * @api private - */ - validateService: function validateService() { - }, - - /** - * @api private - */ - loadServiceClass: function loadServiceClass(serviceConfig) { - var config = serviceConfig; - if (!AWS.util.isEmpty(this.api)) { - return null; - } else if (config.apiConfig) { - return AWS.Service.defineServiceApi(this.constructor, config.apiConfig); - } else if (!this.constructor.services) { - return null; - } else { - config = new AWS.Config(AWS.config); - config.update(serviceConfig, 
true); - var version = config.apiVersions[this.constructor.serviceIdentifier]; - version = version || config.apiVersion; - return this.getLatestServiceClass(version); - } - }, - - /** - * @api private - */ - getLatestServiceClass: function getLatestServiceClass(version) { - version = this.getLatestServiceVersion(version); - if (this.constructor.services[version] === null) { - AWS.Service.defineServiceApi(this.constructor, version); - } - - return this.constructor.services[version]; - }, - - /** - * @api private - */ - getLatestServiceVersion: function getLatestServiceVersion(version) { - if (!this.constructor.services || this.constructor.services.length === 0) { - throw new Error('No services defined on ' + - this.constructor.serviceIdentifier); - } - - if (!version) { - version = 'latest'; - } else if (AWS.util.isType(version, Date)) { - version = AWS.util.date.iso8601(version).split('T')[0]; - } - - if (Object.hasOwnProperty(this.constructor.services, version)) { - return version; - } - - var keys = Object.keys(this.constructor.services).sort(); - var selectedVersion = null; - for (var i = keys.length - 1; i >= 0; i--) { - // versions that end in "*" are not available on disk and can be - // skipped, so do not choose these as selectedVersions - if (keys[i][keys[i].length - 1] !== '*') { - selectedVersion = keys[i]; - } - if (keys[i].substr(0, 10) <= version) { - return selectedVersion; - } - } - - throw new Error('Could not find ' + this.constructor.serviceIdentifier + - ' API to satisfy version constraint `' + version + '\''); - }, - - /** - * @api private - */ - api: {}, - - /** - * @api private - */ - defaultRetryCount: 3, - - /** - * @api private - */ - customizeRequests: function customizeRequests(callback) { - if (!callback) { - this.customRequestHandler = null; - } else if (typeof callback === 'function') { - this.customRequestHandler = callback; - } else { - throw new Error('Invalid callback type \'' + typeof callback + '\' provided in customizeRequests'); - } - }, - - /** - * Calls an operation on a service with the given input parameters. - * - * @param operation [String] the name of the operation to call on the service. - * @param params [map] a map of input options for the operation - * @callback callback function(err, data) - * If a callback is supplied, it is called when a response is returned - * from the service. - * @param err [Error] the error object returned from the request. - * Set to `null` if the request is successful. - * @param data [Object] the de-serialized data returned from - * the request. Set to `null` if a request error occurs. - */ - makeRequest: function makeRequest(operation, params, callback) { - if (typeof params === 'function') { - callback = params; - params = null; - } - - params = params || {}; - if (this.config.params) { // copy only toplevel bound params - var rules = this.api.operations[operation]; - if (rules) { - params = AWS.util.copy(params); - AWS.util.each(this.config.params, function(key, value) { - if (rules.input.members[key]) { - if (params[key] === undefined || params[key] === null) { - params[key] = value; - } - } - }); - } - } - - var request = new AWS.Request(this, operation, params); - this.addAllRequestListeners(request); - this.attachMonitoringEmitter(request); - if (callback) request.send(callback); - return request; - }, - - /** - * Calls an operation on a service with the given input parameters, without - * any authentication data. This method is useful for "public" API operations. 
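As an aside (illustrative sketch, not part of the removed bundle), the makeRequest contract above was typically exercised in one of two ways; the client, operation, and parameters here are hypothetical:

```javascript
var s3 = new AWS.S3();

// Callback form: the request is sent immediately.
s3.makeRequest('listObjects', {Bucket: 'example-bucket'}, function(err, data) {
  if (err) console.log(err, err.stack);
  else console.log(data);
});

// Request-object form: nothing is sent until send() is called.
var request = s3.makeRequest('listObjects', {Bucket: 'example-bucket'});
request.send(function(err, data) { /* handle response */ });
```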
- * - * @param operation [String] the name of the operation to call on the service. - * @param params [map] a map of input options for the operation - * @callback callback function(err, data) - * If a callback is supplied, it is called when a response is returned - * from the service. - * @param err [Error] the error object returned from the request. - * Set to `null` if the request is successful. - * @param data [Object] the de-serialized data returned from - * the request. Set to `null` if a request error occurs. - */ - makeUnauthenticatedRequest: function makeUnauthenticatedRequest(operation, params, callback) { - if (typeof params === 'function') { - callback = params; - params = {}; - } - - var request = this.makeRequest(operation, params).toUnauthenticated(); - return callback ? request.send(callback) : request; - }, - - /** - * Waits for a given state - * - * @param state [String] the state on the service to wait for - * @param params [map] a map of parameters to pass with each request - * @option params $waiter [map] a map of configuration options for the waiter - * @option params $waiter.delay [Number] The number of seconds to wait between - * requests - * @option params $waiter.maxAttempts [Number] The maximum number of requests - * to send while waiting - * @callback callback function(err, data) - * If a callback is supplied, it is called when a response is returned - * from the service. - * @param err [Error] the error object returned from the request. - * Set to `null` if the request is successful. - * @param data [Object] the de-serialized data returned from - * the request. Set to `null` if a request error occurs. - */ - waitFor: function waitFor(state, params, callback) { - var waiter = new AWS.ResourceWaiter(this, state); - return waiter.wait(params, callback); - }, - - /** - * @api private - */ - addAllRequestListeners: function addAllRequestListeners(request) { - var list = [AWS.events, AWS.EventListeners.Core, this.serviceInterface(), - AWS.EventListeners.CorePost]; - for (var i = 0; i < list.length; i++) { - if (list[i]) request.addListeners(list[i]); - } - - // disable parameter validation - if (!this.config.paramValidation) { - request.removeListener('validate', - AWS.EventListeners.Core.VALIDATE_PARAMETERS); - } - - if (this.config.logger) { // add logging events - request.addListeners(AWS.EventListeners.Logger); - } - - this.setupRequestListeners(request); - // call prototype's customRequestHandler - if (typeof this.constructor.prototype.customRequestHandler === 'function') { - this.constructor.prototype.customRequestHandler(request); - } - // call instance's customRequestHandler - if (Object.prototype.hasOwnProperty.call(this, 'customRequestHandler') && typeof this.customRequestHandler === 'function') { - this.customRequestHandler(request); - } - }, - - /** - * Event recording metrics for a whole API call. - * @returns {object} a subset of api call metrics - * @api private - */ - apiCallEvent: function apiCallEvent(request) { - var api = request.service.api.operations[request.operation]; - var monitoringEvent = { - Type: 'ApiCall', - Api: api ? 
api.name : request.operation, - Version: 1, - Service: request.service.api.serviceId || request.service.api.endpointPrefix, - Region: request.httpRequest.region, - MaxRetriesExceeded: 0, - UserAgent: request.httpRequest.getUserAgent(), - }; - var response = request.response; - if (response.httpResponse.statusCode) { - monitoringEvent.FinalHttpStatusCode = response.httpResponse.statusCode; - } - if (response.error) { - var error = response.error; - var statusCode = response.httpResponse.statusCode; - if (statusCode > 299) { - if (error.code) monitoringEvent.FinalAwsException = error.code; - if (error.message) monitoringEvent.FinalAwsExceptionMessage = error.message; - } else { - if (error.code || error.name) monitoringEvent.FinalSdkException = error.code || error.name; - if (error.message) monitoringEvent.FinalSdkExceptionMessage = error.message; - } - } - return monitoringEvent; - }, - - /** - * Event recording metrics for an API call attempt. - * @returns {object} a subset of api call attempt metrics - * @api private - */ - apiAttemptEvent: function apiAttemptEvent(request) { - var api = request.service.api.operations[request.operation]; - var monitoringEvent = { - Type: 'ApiCallAttempt', - Api: api ? api.name : request.operation, - Version: 1, - Service: request.service.api.serviceId || request.service.api.endpointPrefix, - Fqdn: request.httpRequest.endpoint.hostname, - UserAgent: request.httpRequest.getUserAgent(), - }; - var response = request.response; - if (response.httpResponse.statusCode) { - monitoringEvent.HttpStatusCode = response.httpResponse.statusCode; - } - if ( - !request._unAuthenticated && - request.service.config.credentials && - request.service.config.credentials.accessKeyId - ) { - monitoringEvent.AccessKey = request.service.config.credentials.accessKeyId; - } - if (!response.httpResponse.headers) return monitoringEvent; - if (request.httpRequest.headers['x-amz-security-token']) { - monitoringEvent.SessionToken = request.httpRequest.headers['x-amz-security-token']; - } - if (response.httpResponse.headers['x-amzn-requestid']) { - monitoringEvent.XAmznRequestId = response.httpResponse.headers['x-amzn-requestid']; - } - if (response.httpResponse.headers['x-amz-request-id']) { - monitoringEvent.XAmzRequestId = response.httpResponse.headers['x-amz-request-id']; - } - if (response.httpResponse.headers['x-amz-id-2']) { - monitoringEvent.XAmzId2 = response.httpResponse.headers['x-amz-id-2']; - } - return monitoringEvent; - }, - - /** - * Add metrics of failed request. - * @api private - */ - attemptFailEvent: function attemptFailEvent(request) { - var monitoringEvent = this.apiAttemptEvent(request); - var response = request.response; - var error = response.error; - if (response.httpResponse.statusCode > 299 ) { - if (error.code) monitoringEvent.AwsException = error.code; - if (error.message) monitoringEvent.AwsExceptionMessage = error.message; - } else { - if (error.code || error.name) monitoringEvent.SdkException = error.code || error.name; - if (error.message) monitoringEvent.SdkExceptionMessage = error.message; - } - return monitoringEvent; - }, - - /** - * Attach listeners to request object to fetch metrics of each request - * and emit data object through \'ApiCall\' and \'ApiCallAttempt\' events. - * @api private - */ - attachMonitoringEmitter: function attachMonitoringEmitter(request) { - var attemptTimestamp; //timestamp marking the beginning of a request attempt - var attemptStartRealTime; //Start time of request attempt. 
Used to calculating attemptLatency - var attemptLatency; //latency from request sent out to http response reaching SDK - var callStartRealTime; //Start time of API call. Used to calculating API call latency - var attemptCount = 0; //request.retryCount is not reliable here - var region; //region cache region for each attempt since it can be updated in plase (e.g. s3) - var callTimestamp; //timestamp when the request is created - var self = this; - var addToHead = true; - - request.on('validate', function () { - callStartRealTime = AWS.util.realClock.now(); - callTimestamp = Date.now(); - }, addToHead); - request.on('sign', function () { - attemptStartRealTime = AWS.util.realClock.now(); - attemptTimestamp = Date.now(); - region = request.httpRequest.region; - attemptCount++; - }, addToHead); - request.on('validateResponse', function() { - attemptLatency = Math.round(AWS.util.realClock.now() - attemptStartRealTime); - }); - request.addNamedListener('API_CALL_ATTEMPT', 'success', function API_CALL_ATTEMPT() { - var apiAttemptEvent = self.apiAttemptEvent(request); - apiAttemptEvent.Timestamp = attemptTimestamp; - apiAttemptEvent.AttemptLatency = attemptLatency >= 0 ? attemptLatency : 0; - apiAttemptEvent.Region = region; - self.emit('apiCallAttempt', [apiAttemptEvent]); - }); - request.addNamedListener('API_CALL_ATTEMPT_RETRY', 'retry', function API_CALL_ATTEMPT_RETRY() { - var apiAttemptEvent = self.attemptFailEvent(request); - apiAttemptEvent.Timestamp = attemptTimestamp; - //attemptLatency may not be available if fail before response - attemptLatency = attemptLatency || - Math.round(AWS.util.realClock.now() - attemptStartRealTime); - apiAttemptEvent.AttemptLatency = attemptLatency >= 0 ? attemptLatency : 0; - apiAttemptEvent.Region = region; - self.emit('apiCallAttempt', [apiAttemptEvent]); - }); - request.addNamedListener('API_CALL', 'complete', function API_CALL() { - var apiCallEvent = self.apiCallEvent(request); - apiCallEvent.AttemptCount = attemptCount; - if (apiCallEvent.AttemptCount <= 0) return; - apiCallEvent.Timestamp = callTimestamp; - var latency = Math.round(AWS.util.realClock.now() - callStartRealTime); - apiCallEvent.Latency = latency >= 0 ? latency : 0; - var response = request.response; - if ( - response.error && - response.error.retryable && - typeof response.retryCount === 'number' && - typeof response.maxRetries === 'number' && - (response.retryCount >= response.maxRetries) - ) { - apiCallEvent.MaxRetriesExceeded = 1; - } - self.emit('apiCall', [apiCallEvent]); - }); - }, - - /** - * Override this method to setup any custom request listeners for each - * new request to the service. - * - * @method_abstract This is an abstract method. - */ - setupRequestListeners: function setupRequestListeners(request) { - }, - - /** - * Gets the signing name for a given request - * @api private - */ - getSigningName: function getSigningName() { - return this.api.signingName || this.api.endpointPrefix; - }, - - /** - * Gets the signer class for a given request - * @api private - */ - getSignerClass: function getSignerClass(request) { - var version; - // get operation authtype if present - var operation = null; - var authtype = ''; - if (request) { - var operations = request.service.api.operations || {}; - operation = operations[request.operation] || null; - authtype = operation ? 
operation.authtype : ''; - } - if (this.config.signatureVersion) { - version = this.config.signatureVersion; - } else if (authtype === 'v4' || authtype === 'v4-unsigned-body') { - version = 'v4'; - } else if (authtype === 'bearer') { - version = 'bearer'; - } else { - version = this.api.signatureVersion; - } - return AWS.Signers.RequestSigner.getVersion(version); - }, - - /** - * @api private - */ - serviceInterface: function serviceInterface() { - switch (this.api.protocol) { - case 'ec2': return AWS.EventListeners.Query; - case 'query': return AWS.EventListeners.Query; - case 'json': return AWS.EventListeners.Json; - case 'rest-json': return AWS.EventListeners.RestJson; - case 'rest-xml': return AWS.EventListeners.RestXml; - } - if (this.api.protocol) { - throw new Error('Invalid service `protocol\' ' + - this.api.protocol + ' in API config'); - } - }, - - /** - * @api private - */ - successfulResponse: function successfulResponse(resp) { - return resp.httpResponse.statusCode < 300; - }, - - /** - * How many times a failed request should be retried before giving up. - * the defaultRetryCount can be overriden by service classes. - * - * @api private - */ - numRetries: function numRetries() { - if (this.config.maxRetries !== undefined) { - return this.config.maxRetries; - } else { - return this.defaultRetryCount; - } - }, - - /** - * @api private - */ - retryDelays: function retryDelays(retryCount, err) { - return AWS.util.calculateRetryDelay(retryCount, this.config.retryDelayOptions, err); - }, - - /** - * @api private - */ - retryableError: function retryableError(error) { - if (this.timeoutError(error)) return true; - if (this.networkingError(error)) return true; - if (this.expiredCredentialsError(error)) return true; - if (this.throttledError(error)) return true; - if (error.statusCode >= 500) return true; - return false; - }, - - /** - * @api private - */ - networkingError: function networkingError(error) { - return error.code === 'NetworkingError'; - }, - - /** - * @api private - */ - timeoutError: function timeoutError(error) { - return error.code === 'TimeoutError'; - }, - - /** - * @api private - */ - expiredCredentialsError: function expiredCredentialsError(error) { - // TODO : this only handles *one* of the expired credential codes - return (error.code === 'ExpiredTokenException'); - }, - - /** - * @api private - */ - clockSkewError: function clockSkewError(error) { - switch (error.code) { - case 'RequestTimeTooSkewed': - case 'RequestExpired': - case 'InvalidSignatureException': - case 'SignatureDoesNotMatch': - case 'AuthFailure': - case 'RequestInTheFuture': - return true; - default: return false; - } - }, - - /** - * @api private - */ - getSkewCorrectedDate: function getSkewCorrectedDate() { - return new Date(Date.now() + this.config.systemClockOffset); - }, - - /** - * @api private - */ - applyClockOffset: function applyClockOffset(newServerTime) { - if (newServerTime) { - this.config.systemClockOffset = newServerTime - Date.now(); - } - }, - - /** - * @api private - */ - isClockSkewed: function isClockSkewed(newServerTime) { - if (newServerTime) { - return Math.abs(this.getSkewCorrectedDate().getTime() - newServerTime) >= 300000; - } - }, - - /** - * @api private - */ - throttledError: function throttledError(error) { - // this logic varies between services - if (error.statusCode === 429) return true; - switch (error.code) { - case 'ProvisionedThroughputExceededException': - case 'Throttling': - case 'ThrottlingException': - case 'RequestLimitExceeded': - case 
'RequestThrottled': - case 'RequestThrottledException': - case 'TooManyRequestsException': - case 'TransactionInProgressException': //dynamodb - case 'EC2ThrottledException': - return true; - default: - return false; - } - }, - - /** - * @api private - */ - endpointFromTemplate: function endpointFromTemplate(endpoint) { - if (typeof endpoint !== 'string') return endpoint; - - var e = endpoint; - e = e.replace(/\{service\}/g, this.api.endpointPrefix); - e = e.replace(/\{region\}/g, this.config.region); - e = e.replace(/\{scheme\}/g, this.config.sslEnabled ? 'https' : 'http'); - return e; - }, - - /** - * @api private - */ - setEndpoint: function setEndpoint(endpoint) { - this.endpoint = new AWS.Endpoint(endpoint, this.config); - }, - - /** - * @api private - */ - paginationConfig: function paginationConfig(operation, throwException) { - var paginator = this.api.operations[operation].paginator; - if (!paginator) { - if (throwException) { - var e = new Error(); - throw AWS.util.error(e, 'No pagination configuration for ' + operation); - } - return null; - } - - return paginator; - } -}); - -AWS.util.update(AWS.Service, { - - /** - * Adds one method for each operation described in the api configuration - * - * @api private - */ - defineMethods: function defineMethods(svc) { - AWS.util.each(svc.prototype.api.operations, function iterator(method) { - if (svc.prototype[method]) return; - var operation = svc.prototype.api.operations[method]; - if (operation.authtype === 'none') { - svc.prototype[method] = function (params, callback) { - return this.makeUnauthenticatedRequest(method, params, callback); - }; - } else { - svc.prototype[method] = function (params, callback) { - return this.makeRequest(method, params, callback); - }; - } - }); - }, - - /** - * Defines a new Service class using a service identifier and list of versions - * including an optional set of features (functions) to apply to the class - * prototype. - * - * @param serviceIdentifier [String] the identifier for the service - * @param versions [Array] a list of versions that work with this - * service - * @param features [Object] an object to attach to the prototype - * @return [Class] the service class defined by this function. 
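For context (illustrative sketch, not part of the removed bundle), defineService was invoked internally roughly as follows; the identifier and version are made up, and a real service also needs an API model registered with AWS.apiLoader:

```javascript
// Hypothetical service definition (editorial sketch).
var MyService = AWS.Service.defineService('myservice', ['2015-01-01']);

// The returned class behaves like any other service client constructor.
var client = new MyService({region: 'us-east-1'});
```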
- */ - defineService: function defineService(serviceIdentifier, versions, features) { - AWS.Service._serviceMap[serviceIdentifier] = true; - if (!Array.isArray(versions)) { - features = versions; - versions = []; - } - - var svc = inherit(AWS.Service, features || {}); - - if (typeof serviceIdentifier === 'string') { - AWS.Service.addVersions(svc, versions); - - var identifier = svc.serviceIdentifier || serviceIdentifier; - svc.serviceIdentifier = identifier; - } else { // defineService called with an API - svc.prototype.api = serviceIdentifier; - AWS.Service.defineMethods(svc); - } - AWS.SequentialExecutor.call(this.prototype); - //util.clientSideMonitoring is only available in node - if (!this.prototype.publisher && AWS.util.clientSideMonitoring) { - var Publisher = AWS.util.clientSideMonitoring.Publisher; - var configProvider = AWS.util.clientSideMonitoring.configProvider; - var publisherConfig = configProvider(); - this.prototype.publisher = new Publisher(publisherConfig); - if (publisherConfig.enabled) { - //if csm is enabled in environment, SDK should send all metrics - AWS.Service._clientSideMonitoring = true; - } - } - AWS.SequentialExecutor.call(svc.prototype); - AWS.Service.addDefaultMonitoringListeners(svc.prototype); - return svc; - }, - - /** - * @api private - */ - addVersions: function addVersions(svc, versions) { - if (!Array.isArray(versions)) versions = [versions]; - - svc.services = svc.services || {}; - for (var i = 0; i < versions.length; i++) { - if (svc.services[versions[i]] === undefined) { - svc.services[versions[i]] = null; - } - } - - svc.apiVersions = Object.keys(svc.services).sort(); - }, - - /** - * @api private - */ - defineServiceApi: function defineServiceApi(superclass, version, apiConfig) { - var svc = inherit(superclass, { - serviceIdentifier: superclass.serviceIdentifier - }); - - function setApi(api) { - if (api.isApi) { - svc.prototype.api = api; - } else { - svc.prototype.api = new Api(api, { - serviceIdentifier: superclass.serviceIdentifier - }); - } - } - - if (typeof version === 'string') { - if (apiConfig) { - setApi(apiConfig); - } else { - try { - setApi(AWS.apiLoader(superclass.serviceIdentifier, version)); - } catch (err) { - throw AWS.util.error(err, { - message: 'Could not find API configuration ' + - superclass.serviceIdentifier + '-' + version - }); - } - } - if (!Object.prototype.hasOwnProperty.call(superclass.services, version)) { - superclass.apiVersions = superclass.apiVersions.concat(version).sort(); - } - superclass.services[version] = svc; - } else { - setApi(version); - } - - AWS.Service.defineMethods(svc); - return svc; - }, - - /** - * @api private - */ - hasService: function(identifier) { - return Object.prototype.hasOwnProperty.call(AWS.Service._serviceMap, identifier); - }, - - /** - * @param attachOn attach default monitoring listeners to object - * - * Each monitoring event should be emitted from service client to service constructor prototype and then - * to global service prototype like bubbling up. These default monitoring events listener will transfer - * the monitoring events to the upper layer. 
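For context (illustrative sketch, not part of the removed bundle), the monitoring events that bubble up as described above could be observed directly on a client; the client and operation are hypothetical:

```javascript
var s3 = new AWS.S3();

// 'apiCall' events carry the fields built by apiCallEvent() above.
s3.on('apiCall', function(event) {
  console.log(event.Api, event.Latency, event.AttemptCount);
});

s3.listBuckets(function(err, data) { /* 'apiCall' fires when the call completes */ });
```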
- * @api private - */ - addDefaultMonitoringListeners: function addDefaultMonitoringListeners(attachOn) { - attachOn.addNamedListener('MONITOR_EVENTS_BUBBLE', 'apiCallAttempt', function EVENTS_BUBBLE(event) { - var baseClass = Object.getPrototypeOf(attachOn); - if (baseClass._events) baseClass.emit('apiCallAttempt', [event]); - }); - attachOn.addNamedListener('CALL_EVENTS_BUBBLE', 'apiCall', function CALL_EVENTS_BUBBLE(event) { - var baseClass = Object.getPrototypeOf(attachOn); - if (baseClass._events) baseClass.emit('apiCall', [event]); - }); - }, - - /** - * @api private - */ - _serviceMap: {} -}); - -AWS.util.mixin(AWS.Service, AWS.SequentialExecutor); - -/** - * @api private - */ -module.exports = AWS.Service; - - -/***/ }), - -/***/ 4338: -/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { - -var AWS = __nccwpck_require__(28437); - -AWS.util.update(AWS.APIGateway.prototype, { -/** - * Sets the Accept header to application/json. - * - * @api private - */ - setAcceptHeader: function setAcceptHeader(req) { - var httpRequest = req.httpRequest; - if (!httpRequest.headers.Accept) { - httpRequest.headers['Accept'] = 'application/json'; - } - }, - - /** - * @api private - */ - setupRequestListeners: function setupRequestListeners(request) { - request.addListener('build', this.setAcceptHeader); - if (request.operation === 'getExport') { - var params = request.params || {}; - if (params.exportType === 'swagger') { - request.addListener('extractData', AWS.util.convertPayloadToString); - } - } - } -}); - - - -/***/ }), - -/***/ 95483: -/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { - -var AWS = __nccwpck_require__(28437); - -// pull in CloudFront signer -__nccwpck_require__(93260); - -AWS.util.update(AWS.CloudFront.prototype, { - - setupRequestListeners: function setupRequestListeners(request) { - request.addListener('extractData', AWS.util.hoistPayloadMember); - } - -}); - - -/***/ }), - -/***/ 48571: -/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { - -var AWS = __nccwpck_require__(28437); - -/** - * Constructs a service interface object. Each API operation is exposed as a - * function on service. - * - * ### Sending a Request Using CloudSearchDomain - * - * ```javascript - * var csd = new AWS.CloudSearchDomain({endpoint: 'my.host.tld'}); - * csd.search(params, function (err, data) { - * if (err) console.log(err, err.stack); // an error occurred - * else console.log(data); // successful response - * }); - * ``` - * - * ### Locking the API Version - * - * In order to ensure that the CloudSearchDomain object uses this specific API, - * you can construct the object by passing the `apiVersion` option to the - * constructor: - * - * ```javascript - * var csd = new AWS.CloudSearchDomain({ - * endpoint: 'my.host.tld', - * apiVersion: '2013-01-01' - * }); - * ``` - * - * You can also set the API version globally in `AWS.config.apiVersions` using - * the **cloudsearchdomain** service identifier: - * - * ```javascript - * AWS.config.apiVersions = { - * cloudsearchdomain: '2013-01-01', - * // other service API versions - * }; - * - * var csd = new AWS.CloudSearchDomain({endpoint: 'my.host.tld'}); - * ``` - * - * @note You *must* provide an `endpoint` configuration parameter when - * constructing this service. See {constructor} for more information. - * - * @!method constructor(options = {}) - * Constructs a service object. This object has one method for each - * API operation. 
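As an aside (illustrative sketch, not part of the removed bundle), a typical search call against this client looked roughly like the following; the endpoint and query parameters are hypothetical:

```javascript
var csd = new AWS.CloudSearchDomain({endpoint: 'my.host.tld'});

csd.search({query: 'star wars', size: 10}, function(err, data) {
  if (err) console.log(err, err.stack);
  else console.log(data.hits);
});
```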
- * - * @example Constructing a CloudSearchDomain object - * var csd = new AWS.CloudSearchDomain({endpoint: 'my.host.tld'}); - * @note You *must* provide an `endpoint` when constructing this service. - * @option (see AWS.Config.constructor) - * - * @service cloudsearchdomain - * @version 2013-01-01 - */ -AWS.util.update(AWS.CloudSearchDomain.prototype, { - /** - * @api private - */ - validateService: function validateService() { - if (!this.config.endpoint || this.config.endpoint.indexOf('{') >= 0) { - var msg = 'AWS.CloudSearchDomain requires an explicit ' + - '`endpoint\' configuration option.'; - throw AWS.util.error(new Error(), - {name: 'InvalidEndpoint', message: msg}); - } - }, - - /** - * @api private - */ - setupRequestListeners: function setupRequestListeners(request) { - request.removeListener('validate', - AWS.EventListeners.Core.VALIDATE_CREDENTIALS - ); - request.onAsync('validate', this.validateCredentials); - request.addListener('validate', this.updateRegion); - if (request.operation === 'search') { - request.addListener('build', this.convertGetToPost); - } - }, - - /** - * @api private - */ - validateCredentials: function(req, done) { - if (!req.service.api.signatureVersion) return done(); // none - req.service.config.getCredentials(function(err) { - if (err) { - req.removeListener('sign', AWS.EventListeners.Core.SIGN); - } - done(); - }); - }, - - /** - * @api private - */ - convertGetToPost: function(request) { - var httpRequest = request.httpRequest; - // convert queries to POST to avoid length restrictions - var path = httpRequest.path.split('?'); - httpRequest.method = 'POST'; - httpRequest.path = path[0]; - httpRequest.body = path[1]; - httpRequest.headers['Content-Length'] = httpRequest.body.length; - httpRequest.headers['Content-Type'] = 'application/x-www-form-urlencoded'; - }, - - /** - * @api private - */ - updateRegion: function updateRegion(request) { - var endpoint = request.httpRequest.endpoint.hostname; - var zones = endpoint.split('.'); - request.httpRequest.region = zones[1] || request.httpRequest.region; - } - -}); - - -/***/ }), - -/***/ 59050: -/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { - -var AWS = __nccwpck_require__(28437); -var rdsutil = __nccwpck_require__(30650); - -/** -* @api private -*/ -var crossRegionOperations = ['createDBCluster', 'copyDBClusterSnapshot']; - -AWS.util.update(AWS.DocDB.prototype, { - /** - * @api private - */ - setupRequestListeners: function setupRequestListeners(request) { - if ( - crossRegionOperations.indexOf(request.operation) !== -1 && - this.config.params && - this.config.params.SourceRegion && - request.params && - !request.params.SourceRegion - ) { - request.params.SourceRegion = this.config.params.SourceRegion; - } - rdsutil.setupRequestListeners(this, request, crossRegionOperations); - }, -}); - - -/***/ }), - -/***/ 17101: -/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { - -var AWS = __nccwpck_require__(28437); -__nccwpck_require__(90030); - -AWS.util.update(AWS.DynamoDB.prototype, { - /** - * @api private - */ - setupRequestListeners: function setupRequestListeners(request) { - if (request.service.config.dynamoDbCrc32) { - request.removeListener('extractData', AWS.EventListeners.Json.EXTRACT_DATA); - request.addListener('extractData', this.checkCrc32); - request.addListener('extractData', AWS.EventListeners.Json.EXTRACT_DATA); - } - }, - - /** - * @api private - */ - checkCrc32: function checkCrc32(resp) { - if 
(!resp.httpResponse.streaming && !resp.request.service.crc32IsValid(resp)) { - resp.data = null; - resp.error = AWS.util.error(new Error(), { - code: 'CRC32CheckFailed', - message: 'CRC32 integrity check failed', - retryable: true - }); - resp.request.haltHandlersOnError(); - throw (resp.error); - } - }, - - /** - * @api private - */ - crc32IsValid: function crc32IsValid(resp) { - var crc = resp.httpResponse.headers['x-amz-crc32']; - if (!crc) return true; // no (valid) CRC32 header - return parseInt(crc, 10) === AWS.util.crypto.crc32(resp.httpResponse.body); - }, - - /** - * @api private - */ - defaultRetryCount: 10, - - /** - * @api private - */ - retryDelays: function retryDelays(retryCount, err) { - var retryDelayOptions = AWS.util.copy(this.config.retryDelayOptions); - - if (typeof retryDelayOptions.base !== 'number') { - retryDelayOptions.base = 50; // default for dynamodb - } - var delay = AWS.util.calculateRetryDelay(retryCount, retryDelayOptions, err); - return delay; - } -}); - - -/***/ }), - -/***/ 92501: -/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { - -var AWS = __nccwpck_require__(28437); - -AWS.util.update(AWS.EC2.prototype, { - /** - * @api private - */ - setupRequestListeners: function setupRequestListeners(request) { - request.removeListener('extractError', AWS.EventListeners.Query.EXTRACT_ERROR); - request.addListener('extractError', this.extractError); - - if (request.operation === 'copySnapshot') { - request.onAsync('validate', this.buildCopySnapshotPresignedUrl); - } - }, - - /** - * @api private - */ - buildCopySnapshotPresignedUrl: function buildCopySnapshotPresignedUrl(req, done) { - if (req.params.PresignedUrl || req._subRequest) { - return done(); - } - - req.params = AWS.util.copy(req.params); - req.params.DestinationRegion = req.service.config.region; - - var config = AWS.util.copy(req.service.config); - delete config.endpoint; - config.region = req.params.SourceRegion; - var svc = new req.service.constructor(config); - var newReq = svc[req.operation](req.params); - newReq._subRequest = true; - newReq.presign(function(err, url) { - if (err) done(err); - else { - req.params.PresignedUrl = url; - done(); - } - }); - }, - - /** - * @api private - */ - extractError: function extractError(resp) { - // EC2 nests the error code and message deeper than other AWS Query services. - var httpResponse = resp.httpResponse; - var data = new AWS.XML.Parser().parse(httpResponse.body.toString() || ''); - if (data.Errors) { - resp.error = AWS.util.error(new Error(), { - code: data.Errors.Error.Code, - message: data.Errors.Error.Message - }); - } else { - resp.error = AWS.util.error(new Error(), { - code: httpResponse.statusCode, - message: null - }); - } - resp.error.requestId = data.RequestID || null; - } -}); - - -/***/ }), - -/***/ 3034: -/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { - -var AWS = __nccwpck_require__(28437); - -AWS.util.update(AWS.EventBridge.prototype, { - /** - * @api private - */ - setupRequestListeners: function setupRequestListeners(request) { - if (request.operation === 'putEvents') { - var params = request.params || {}; - if (params.EndpointId !== undefined) { - throw new AWS.util.error(new Error(), { - code: 'InvalidParameter', - message: 'EndpointId is not supported in current SDK.\n' + - 'You should consider switching to V3(https://github.com/aws/aws-sdk-js-v3).' 
- }); - } - } - }, -}); - - -/***/ }), - -/***/ 14472: -/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { - -var AWS = __nccwpck_require__(28437); - -AWS.util.update(AWS.Glacier.prototype, { - /** - * @api private - */ - setupRequestListeners: function setupRequestListeners(request) { - if (Array.isArray(request._events.validate)) { - request._events.validate.unshift(this.validateAccountId); - } else { - request.on('validate', this.validateAccountId); - } - request.removeListener('afterBuild', - AWS.EventListeners.Core.COMPUTE_SHA256); - request.on('build', this.addGlacierApiVersion); - request.on('build', this.addTreeHashHeaders); - }, - - /** - * @api private - */ - validateAccountId: function validateAccountId(request) { - if (request.params.accountId !== undefined) return; - request.params = AWS.util.copy(request.params); - request.params.accountId = '-'; - }, - - /** - * @api private - */ - addGlacierApiVersion: function addGlacierApiVersion(request) { - var version = request.service.api.apiVersion; - request.httpRequest.headers['x-amz-glacier-version'] = version; - }, - - /** - * @api private - */ - addTreeHashHeaders: function addTreeHashHeaders(request) { - if (request.params.body === undefined) return; - - var hashes = request.service.computeChecksums(request.params.body); - request.httpRequest.headers['X-Amz-Content-Sha256'] = hashes.linearHash; - - if (!request.httpRequest.headers['x-amz-sha256-tree-hash']) { - request.httpRequest.headers['x-amz-sha256-tree-hash'] = hashes.treeHash; - } - }, - - /** - * @!group Computing Checksums - */ - - /** - * Computes the SHA-256 linear and tree hash checksums for a given - * block of Buffer data. Pass the tree hash of the computed checksums - * as the checksum input to the {completeMultipartUpload} when performing - * a multi-part upload. - * - * @example Calculate checksum of 5.5MB data chunk - * var glacier = new AWS.Glacier(); - * var data = Buffer.alloc(5.5 * 1024 * 1024); - * data.fill('0'); // fill with zeros - * var results = glacier.computeChecksums(data); - * // Result: { linearHash: '68aff0c5a9...', treeHash: '154e26c78f...' } - * @param data [Buffer, String] data to calculate the checksum for - * @return [map] a map containing - * the linearHash and treeHash properties representing hex based digests - * of the respective checksums. 
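For illustration (sketch, not part of the removed bundle), the tree hash returned by computeChecksums was typically fed to completeMultipartUpload as the doc comment above describes; the vault name, upload ID, and sizes are hypothetical:

```javascript
var glacier = new AWS.Glacier();
var data = Buffer.alloc(5.5 * 1024 * 1024, '0');
var checksums = glacier.computeChecksums(data);

glacier.completeMultipartUpload({
  vaultName: 'example-vault',
  uploadId: 'EXAMPLE-UPLOAD-ID',
  archiveSize: String(data.length),
  checksum: checksums.treeHash
}, function(err, result) {
  if (err) console.log(err, err.stack);
  else console.log(result.archiveId);
});
```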
- * @see completeMultipartUpload - */ - computeChecksums: function computeChecksums(data) { - if (!AWS.util.Buffer.isBuffer(data)) data = AWS.util.buffer.toBuffer(data); - - var mb = 1024 * 1024; - var hashes = []; - var hash = AWS.util.crypto.createHash('sha256'); - - // build leaf nodes in 1mb chunks - for (var i = 0; i < data.length; i += mb) { - var chunk = data.slice(i, Math.min(i + mb, data.length)); - hash.update(chunk); - hashes.push(AWS.util.crypto.sha256(chunk)); - } - - return { - linearHash: hash.digest('hex'), - treeHash: this.buildHashTree(hashes) - }; - }, - - /** - * @api private - */ - buildHashTree: function buildHashTree(hashes) { - // merge leaf nodes - while (hashes.length > 1) { - var tmpHashes = []; - for (var i = 0; i < hashes.length; i += 2) { - if (hashes[i + 1]) { - var tmpHash = AWS.util.buffer.alloc(64); - tmpHash.write(hashes[i], 0, 32, 'binary'); - tmpHash.write(hashes[i + 1], 32, 32, 'binary'); - tmpHashes.push(AWS.util.crypto.sha256(tmpHash)); - } else { - tmpHashes.push(hashes[i]); - } - } - hashes = tmpHashes; - } - - return AWS.util.crypto.toHex(hashes[0]); - } -}); - - -/***/ }), - -/***/ 27062: -/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { - -var AWS = __nccwpck_require__(28437); - -/** - * @api private - */ -var blobPayloadOutputOps = [ - 'deleteThingShadow', - 'getThingShadow', - 'updateThingShadow' -]; - -/** - * Constructs a service interface object. Each API operation is exposed as a - * function on service. - * - * ### Sending a Request Using IotData - * - * ```javascript - * var iotdata = new AWS.IotData({endpoint: 'my.host.tld'}); - * iotdata.getThingShadow(params, function (err, data) { - * if (err) console.log(err, err.stack); // an error occurred - * else console.log(data); // successful response - * }); - * ``` - * - * ### Locking the API Version - * - * In order to ensure that the IotData object uses this specific API, - * you can construct the object by passing the `apiVersion` option to the - * constructor: - * - * ```javascript - * var iotdata = new AWS.IotData({ - * endpoint: 'my.host.tld', - * apiVersion: '2015-05-28' - * }); - * ``` - * - * You can also set the API version globally in `AWS.config.apiVersions` using - * the **iotdata** service identifier: - * - * ```javascript - * AWS.config.apiVersions = { - * iotdata: '2015-05-28', - * // other service API versions - * }; - * - * var iotdata = new AWS.IotData({endpoint: 'my.host.tld'}); - * ``` - * - * @note You *must* provide an `endpoint` configuration parameter when - * constructing this service. See {constructor} for more information. - * - * @!method constructor(options = {}) - * Constructs a service object. This object has one method for each - * API operation. - * - * @example Constructing a IotData object - * var iotdata = new AWS.IotData({endpoint: 'my.host.tld'}); - * @note You *must* provide an `endpoint` when constructing this service. 
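As an aside (illustrative sketch, not part of the removed bundle), a typical shadow read with this client looked like the following; the endpoint and thing name are hypothetical:

```javascript
var iotdata = new AWS.IotData({endpoint: 'example.iot.us-east-1.amazonaws.com'});

iotdata.getThingShadow({thingName: 'my-thing'}, function(err, data) {
  if (err) console.log(err, err.stack);
  else console.log(JSON.parse(data.payload)); // payload is returned as a string for shadow operations
});
```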
- * @option (see AWS.Config.constructor) - * - * @service iotdata - * @version 2015-05-28 - */ -AWS.util.update(AWS.IotData.prototype, { - /** - * @api private - */ - validateService: function validateService() { - if (!this.config.endpoint || this.config.endpoint.indexOf('{') >= 0) { - var msg = 'AWS.IotData requires an explicit ' + - '`endpoint\' configuration option.'; - throw AWS.util.error(new Error(), - {name: 'InvalidEndpoint', message: msg}); - } - }, - - /** - * @api private - */ - setupRequestListeners: function setupRequestListeners(request) { - request.addListener('validateResponse', this.validateResponseBody); - if (blobPayloadOutputOps.indexOf(request.operation) > -1) { - request.addListener('extractData', AWS.util.convertPayloadToString); - } - }, - - /** - * @api private - */ - validateResponseBody: function validateResponseBody(resp) { - var body = resp.httpResponse.body.toString() || '{}'; - var bodyCheck = body.trim(); - if (!bodyCheck || bodyCheck.charAt(0) !== '{') { - resp.httpResponse.body = ''; - } - } - -}); - - -/***/ }), - -/***/ 8452: -/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { - -var AWS = __nccwpck_require__(28437); - -AWS.util.update(AWS.Lambda.prototype, { - /** - * @api private - */ - setupRequestListeners: function setupRequestListeners(request) { - if (request.operation === 'invoke') { - request.addListener('extractData', AWS.util.convertPayloadToString); - } - } -}); - - - -/***/ }), - -/***/ 19174: -/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { - -var AWS = __nccwpck_require__(28437); - -AWS.util.update(AWS.MachineLearning.prototype, { - /** - * @api private - */ - setupRequestListeners: function setupRequestListeners(request) { - if (request.operation === 'predict') { - request.addListener('build', this.buildEndpoint); - } - }, - - /** - * Updates request endpoint from PredictEndpoint - * @api private - */ - buildEndpoint: function buildEndpoint(request) { - var url = request.params.PredictEndpoint; - if (url) { - request.httpRequest.endpoint = new AWS.Endpoint(url); - } - } - -}); - - -/***/ }), - -/***/ 73090: -/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { - -var AWS = __nccwpck_require__(28437); -var rdsutil = __nccwpck_require__(30650); - -/** -* @api private -*/ -var crossRegionOperations = ['createDBCluster', 'copyDBClusterSnapshot']; - -AWS.util.update(AWS.Neptune.prototype, { - /** - * @api private - */ - setupRequestListeners: function setupRequestListeners(request) { - if ( - crossRegionOperations.indexOf(request.operation) !== -1 && - this.config.params && - this.config.params.SourceRegion && - request.params && - !request.params.SourceRegion - ) { - request.params.SourceRegion = this.config.params.SourceRegion; - } - rdsutil.setupRequestListeners(this, request, crossRegionOperations); - }, -}); - - -/***/ }), - -/***/ 53199: -/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { - -__nccwpck_require__(44086); - - -/***/ }), - -/***/ 71928: -/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { - -var AWS = __nccwpck_require__(28437); -var rdsutil = __nccwpck_require__(30650); -__nccwpck_require__(16612); - /** - * @api private - */ - var crossRegionOperations = ['copyDBSnapshot', 'createDBInstanceReadReplica', 'createDBCluster', 'copyDBClusterSnapshot', 'startDBInstanceAutomatedBackupsReplication']; - - AWS.util.update(AWS.RDS.prototype, { - /** - * @api private 
- */ - setupRequestListeners: function setupRequestListeners(request) { - rdsutil.setupRequestListeners(this, request, crossRegionOperations); - }, - }); - - -/***/ }), - -/***/ 64070: -/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { - -var AWS = __nccwpck_require__(28437); - -AWS.util.update(AWS.RDSDataService.prototype, { - /** - * @return [Boolean] whether the error can be retried - * @api private - */ - retryableError: function retryableError(error) { - if (error.code === 'BadRequestException' && - error.message && - error.message.match(/^Communications link failure/) && - error.statusCode === 400) { - return true; - } else { - var _super = AWS.Service.prototype.retryableError; - return _super.call(this, error); - } - } -}); - - -/***/ }), - -/***/ 30650: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var AWS = __nccwpck_require__(28437); - -var rdsutil = { - /** - * @api private - */ - setupRequestListeners: function setupRequestListeners(service, request, crossRegionOperations) { - if (crossRegionOperations.indexOf(request.operation) !== -1 && - request.params.SourceRegion) { - request.params = AWS.util.copy(request.params); - if (request.params.PreSignedUrl || - request.params.SourceRegion === service.config.region) { - delete request.params.SourceRegion; - } else { - var doesParamValidation = !!service.config.paramValidation; - // remove the validate parameters listener so we can re-add it after we build the URL - if (doesParamValidation) { - request.removeListener('validate', AWS.EventListeners.Core.VALIDATE_PARAMETERS); - } - request.onAsync('validate', rdsutil.buildCrossRegionPresignedUrl); - if (doesParamValidation) { - request.addListener('validate', AWS.EventListeners.Core.VALIDATE_PARAMETERS); - } - } - } - }, - - /** - * @api private - */ - buildCrossRegionPresignedUrl: function buildCrossRegionPresignedUrl(req, done) { - var config = AWS.util.copy(req.service.config); - config.region = req.params.SourceRegion; - delete req.params.SourceRegion; - delete config.endpoint; - // relevant params for the operation will already be in req.params - delete config.params; - config.signatureVersion = 'v4'; - var destinationRegion = req.service.config.region; - - var svc = new req.service.constructor(config); - var newReq = svc[req.operation](AWS.util.copy(req.params)); - newReq.on('build', function addDestinationRegionParam(request) { - var httpRequest = request.httpRequest; - httpRequest.params.DestinationRegion = destinationRegion; - httpRequest.body = AWS.util.queryParamsToString(httpRequest.params); - }); - newReq.presign(function(err, url) { - if (err) done(err); - else { - req.params.PreSignedUrl = url; - done(); - } - }); - } -}; - -/** - * @api private - */ -module.exports = rdsutil; - - -/***/ }), - -/***/ 69627: -/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { - -var AWS = __nccwpck_require__(28437); - -AWS.util.update(AWS.Route53.prototype, { - /** - * @api private - */ - setupRequestListeners: function setupRequestListeners(request) { - request.on('build', this.sanitizeUrl); - }, - - /** - * @api private - */ - sanitizeUrl: function sanitizeUrl(request) { - var path = request.httpRequest.path; - request.httpRequest.path = path.replace(/\/%2F\w+%2F/, '/'); - }, - - /** - * @return [Boolean] whether the error can be retried - * @api private - */ - retryableError: function retryableError(error) { - if (error.code === 'PriorRequestNotComplete' && - error.statusCode === 400) { - 
return true; - } else { - var _super = AWS.Service.prototype.retryableError; - return _super.call(this, error); - } - } -}); - - -/***/ }), - -/***/ 26543: -/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { - -var AWS = __nccwpck_require__(28437); -var v4Credentials = __nccwpck_require__(62660); -var resolveRegionalEndpointsFlag = __nccwpck_require__(85566); -var s3util = __nccwpck_require__(35895); -var regionUtil = __nccwpck_require__(18262); - -// Pull in managed upload extension -__nccwpck_require__(81600); - -/** - * @api private - */ -var operationsWith200StatusCodeError = { - 'completeMultipartUpload': true, - 'copyObject': true, - 'uploadPartCopy': true -}; - -/** - * @api private - */ - var regionRedirectErrorCodes = [ - 'AuthorizationHeaderMalformed', // non-head operations on virtual-hosted global bucket endpoints - 'BadRequest', // head operations on virtual-hosted global bucket endpoints - 'PermanentRedirect', // non-head operations on path-style or regional endpoints - 301 // head operations on path-style or regional endpoints - ]; - -var OBJECT_LAMBDA_SERVICE = 's3-object-lambda'; - -AWS.util.update(AWS.S3.prototype, { - /** - * @api private - */ - getSignatureVersion: function getSignatureVersion(request) { - var defaultApiVersion = this.api.signatureVersion; - var userDefinedVersion = this._originalConfig ? this._originalConfig.signatureVersion : null; - var regionDefinedVersion = this.config.signatureVersion; - var isPresigned = request ? request.isPresigned() : false; - /* - 1) User defined version specified: - a) always return user defined version - 2) No user defined version specified: - a) If not using presigned urls, default to V4 - b) If using presigned urls, default to lowest version the region supports - */ - if (userDefinedVersion) { - userDefinedVersion = userDefinedVersion === 'v2' ? 's3' : userDefinedVersion; - return userDefinedVersion; - } - if (isPresigned !== true) { - defaultApiVersion = 'v4'; - } else if (regionDefinedVersion) { - defaultApiVersion = regionDefinedVersion; - } - return defaultApiVersion; - }, - - /** - * @api private - */ - getSigningName: function getSigningName(req) { - if (req && req.operation === 'writeGetObjectResponse') { - return OBJECT_LAMBDA_SERVICE; - } - - var _super = AWS.Service.prototype.getSigningName; - return (req && req._parsedArn && req._parsedArn.service) - ? 
req._parsedArn.service - : _super.call(this); - }, - - /** - * @api private - */ - getSignerClass: function getSignerClass(request) { - var signatureVersion = this.getSignatureVersion(request); - return AWS.Signers.RequestSigner.getVersion(signatureVersion); - }, - - /** - * @api private - */ - validateService: function validateService() { - var msg; - var messages = []; - - // default to us-east-1 when no region is provided - if (!this.config.region) this.config.region = 'us-east-1'; - - if (!this.config.endpoint && this.config.s3BucketEndpoint) { - messages.push('An endpoint must be provided when configuring ' + - '`s3BucketEndpoint` to true.'); - } - if (messages.length === 1) { - msg = messages[0]; - } else if (messages.length > 1) { - msg = 'Multiple configuration errors:\n' + messages.join('\n'); - } - if (msg) { - throw AWS.util.error(new Error(), - {name: 'InvalidEndpoint', message: msg}); - } - }, - - /** - * @api private - */ - shouldDisableBodySigning: function shouldDisableBodySigning(request) { - var signerClass = this.getSignerClass(); - if (this.config.s3DisableBodySigning === true && signerClass === AWS.Signers.V4 - && request.httpRequest.endpoint.protocol === 'https:') { - return true; - } - return false; - }, - - /** - * @api private - */ - setupRequestListeners: function setupRequestListeners(request) { - var prependListener = true; - request.addListener('validate', this.validateScheme); - request.addListener('validate', this.validateBucketName, prependListener); - request.addListener('validate', this.optInUsEast1RegionalEndpoint, prependListener); - - request.removeListener('validate', - AWS.EventListeners.Core.VALIDATE_REGION); - request.addListener('build', this.addContentType); - request.addListener('build', this.computeContentMd5); - request.addListener('build', this.computeSseCustomerKeyMd5); - request.addListener('build', this.populateURI); - request.addListener('afterBuild', this.addExpect100Continue); - request.addListener('extractError', this.extractError); - request.addListener('extractData', AWS.util.hoistPayloadMember); - request.addListener('extractData', this.extractData); - request.addListener('extractData', this.extractErrorFrom200Response); - request.addListener('beforePresign', this.prepareSignedUrl); - if (this.shouldDisableBodySigning(request)) { - request.removeListener('afterBuild', AWS.EventListeners.Core.COMPUTE_SHA256); - request.addListener('afterBuild', this.disableBodySigning); - } - //deal with ARNs supplied to Bucket - if (request.operation !== 'createBucket' && s3util.isArnInParam(request, 'Bucket')) { - // avoid duplicate parsing in the future - request._parsedArn = AWS.util.ARN.parse(request.params.Bucket); - - request.removeListener('validate', this.validateBucketName); - request.removeListener('build', this.populateURI); - if (request._parsedArn.service === 's3') { - request.addListener('validate', s3util.validateS3AccessPointArn); - request.addListener('validate', this.validateArnResourceType); - request.addListener('validate', this.validateArnRegion); - } else if (request._parsedArn.service === 's3-outposts') { - request.addListener('validate', s3util.validateOutpostsAccessPointArn); - request.addListener('validate', s3util.validateOutpostsArn); - request.addListener('validate', s3util.validateArnRegion); - } - request.addListener('validate', s3util.validateArnAccount); - request.addListener('validate', s3util.validateArnService); - request.addListener('build', this.populateUriFromAccessPointArn); - request.addListener('build', 
s3util.validatePopulateUriFromArn); - return; - } - //listeners regarding region inference - request.addListener('validate', this.validateBucketEndpoint); - request.addListener('validate', this.correctBucketRegionFromCache); - request.onAsync('extractError', this.requestBucketRegion); - if (AWS.util.isBrowser()) { - request.onAsync('retry', this.reqRegionForNetworkingError); - } - }, - - /** - * @api private - */ - validateScheme: function(req) { - var params = req.params, - scheme = req.httpRequest.endpoint.protocol, - sensitive = params.SSECustomerKey || params.CopySourceSSECustomerKey; - if (sensitive && scheme !== 'https:') { - var msg = 'Cannot send SSE keys over HTTP. Set \'sslEnabled\'' + - 'to \'true\' in your configuration'; - throw AWS.util.error(new Error(), - { code: 'ConfigError', message: msg }); - } - }, - - /** - * @api private - */ - validateBucketEndpoint: function(req) { - if (!req.params.Bucket && req.service.config.s3BucketEndpoint) { - var msg = 'Cannot send requests to root API with `s3BucketEndpoint` set.'; - throw AWS.util.error(new Error(), - { code: 'ConfigError', message: msg }); - } - }, - - /** - * @api private - */ - validateArnRegion: function validateArnRegion(req) { - s3util.validateArnRegion(req, { allowFipsEndpoint: true }); - }, - - /** - * Validate resource-type supplied in S3 ARN - */ - validateArnResourceType: function validateArnResourceType(req) { - var resource = req._parsedArn.resource; - - if ( - resource.indexOf('accesspoint:') !== 0 && - resource.indexOf('accesspoint/') !== 0 - ) { - throw AWS.util.error(new Error(), { - code: 'InvalidARN', - message: 'ARN resource should begin with \'accesspoint/\'' - }); - } - }, - - /** - * @api private - */ - validateBucketName: function validateBucketName(req) { - var service = req.service; - var signatureVersion = service.getSignatureVersion(req); - var bucket = req.params && req.params.Bucket; - var key = req.params && req.params.Key; - var slashIndex = bucket && bucket.indexOf('/'); - if (bucket && slashIndex >= 0) { - if (typeof key === 'string' && slashIndex > 0) { - req.params = AWS.util.copy(req.params); - // Need to include trailing slash to match sigv2 behavior - var prefix = bucket.substr(slashIndex + 1) || ''; - req.params.Key = prefix + '/' + key; - req.params.Bucket = bucket.substr(0, slashIndex); - } else if (signatureVersion === 'v4') { - var msg = 'Bucket names cannot contain forward slashes. Bucket: ' + bucket; - throw AWS.util.error(new Error(), - { code: 'InvalidBucket', message: msg }); - } - } - }, - - /** - * @api private - */ - isValidAccelerateOperation: function isValidAccelerateOperation(operation) { - var invalidOperations = [ - 'createBucket', - 'deleteBucket', - 'listBuckets' - ]; - return invalidOperations.indexOf(operation) === -1; - }, - - /** - * When us-east-1 region endpoint configuration is set, in stead of sending request to - * global endpoint(e.g. 's3.amazonaws.com'), we will send request to - * 's3.us-east-1.amazonaws.com'. 
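For illustration (sketch, not part of the removed bundle), the regional us-east-1 endpoint behavior described above is opted into either through client configuration or the environment variable this code reads:

```javascript
var s3 = new AWS.S3({
  region: 'us-east-1',
  s3UsEast1RegionalEndpoint: 'regional'
});

// Equivalent opt-in through the environment:
// process.env.AWS_S3_US_EAST_1_REGIONAL_ENDPOINT = 'regional';
```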
- * @api private - */ - optInUsEast1RegionalEndpoint: function optInUsEast1RegionalEndpoint(req) { - var service = req.service; - var config = service.config; - config.s3UsEast1RegionalEndpoint = resolveRegionalEndpointsFlag(service._originalConfig, { - env: 'AWS_S3_US_EAST_1_REGIONAL_ENDPOINT', - sharedConfig: 's3_us_east_1_regional_endpoint', - clientConfig: 's3UsEast1RegionalEndpoint' - }); - if ( - !(service._originalConfig || {}).endpoint && - req.httpRequest.region === 'us-east-1' && - config.s3UsEast1RegionalEndpoint === 'regional' && - req.httpRequest.endpoint.hostname.indexOf('s3.amazonaws.com') >= 0 - ) { - var insertPoint = config.endpoint.indexOf('.amazonaws.com'); - regionalEndpoint = config.endpoint.substring(0, insertPoint) + - '.us-east-1' + config.endpoint.substring(insertPoint); - req.httpRequest.updateEndpoint(regionalEndpoint); - } - }, - - /** - * S3 prefers dns-compatible bucket names to be moved from the uri path - * to the hostname as a sub-domain. This is not possible, even for dns-compat - * buckets when using SSL and the bucket name contains a dot ('.'). The - * ssl wildcard certificate is only 1-level deep. - * - * @api private - */ - populateURI: function populateURI(req) { - var httpRequest = req.httpRequest; - var b = req.params.Bucket; - var service = req.service; - var endpoint = httpRequest.endpoint; - if (b) { - if (!service.pathStyleBucketName(b)) { - if (service.config.useAccelerateEndpoint && service.isValidAccelerateOperation(req.operation)) { - if (service.config.useDualstackEndpoint) { - endpoint.hostname = b + '.s3-accelerate.dualstack.amazonaws.com'; - } else { - endpoint.hostname = b + '.s3-accelerate.amazonaws.com'; - } - } else if (!service.config.s3BucketEndpoint) { - endpoint.hostname = - b + '.' + endpoint.hostname; - } - - var port = endpoint.port; - if (port !== 80 && port !== 443) { - endpoint.host = endpoint.hostname + ':' + - endpoint.port; - } else { - endpoint.host = endpoint.hostname; - } - - httpRequest.virtualHostedBucket = b; // needed for signing the request - service.removeVirtualHostedBucketFromPath(req); - } - } - }, - - /** - * Takes the bucket name out of the path if bucket is virtual-hosted - * - * @api private - */ - removeVirtualHostedBucketFromPath: function removeVirtualHostedBucketFromPath(req) { - var httpRequest = req.httpRequest; - var bucket = httpRequest.virtualHostedBucket; - if (bucket && httpRequest.path) { - if (req.params && req.params.Key) { - var encodedS3Key = '/' + AWS.util.uriEscapePath(req.params.Key); - if (httpRequest.path.indexOf(encodedS3Key) === 0 && (httpRequest.path.length === encodedS3Key.length || httpRequest.path[encodedS3Key.length] === '?')) { - //path only contains key or path contains only key and querystring - return; - } - } - httpRequest.path = httpRequest.path.replace(new RegExp('/' + bucket), ''); - if (httpRequest.path[0] !== '/') { - httpRequest.path = '/' + httpRequest.path; - } - } - }, - - /** - * When user supply an access point ARN in the Bucket parameter, we need to - * populate the URI according to the ARN. - */ - populateUriFromAccessPointArn: function populateUriFromAccessPointArn(req) { - var accessPointArn = req._parsedArn; - - var isOutpostArn = accessPointArn.service === 's3-outposts'; - var isObjectLambdaArn = accessPointArn.service === 's3-object-lambda'; - - var outpostsSuffix = isOutpostArn ? '.' + accessPointArn.outpostId: ''; - var serviceName = isOutpostArn ? 's3-outposts': 's3-accesspoint'; - var fipsSuffix = !isOutpostArn && req.service.config.useFipsEndpoint ? 
'-fips': ''; - var dualStackSuffix = !isOutpostArn && - req.service.config.useDualstackEndpoint ? '.dualstack' : ''; - - var endpoint = req.httpRequest.endpoint; - var dnsSuffix = regionUtil.getEndpointSuffix(accessPointArn.region); - var useArnRegion = req.service.config.s3UseArnRegion; - - endpoint.hostname = [ - accessPointArn.accessPoint + '-' + accessPointArn.accountId + outpostsSuffix, - serviceName + fipsSuffix + dualStackSuffix, - useArnRegion ? accessPointArn.region : req.service.config.region, - dnsSuffix - ].join('.'); - - if (isObjectLambdaArn) { - // should be in the format: "accesspoint/${accesspointName}" - var serviceName = 's3-object-lambda'; - var accesspointName = accessPointArn.resource.split('/')[1]; - var fipsSuffix = req.service.config.useFipsEndpoint ? '-fips': ''; - endpoint.hostname = [ - accesspointName + '-' + accessPointArn.accountId, - serviceName + fipsSuffix, - useArnRegion ? accessPointArn.region : req.service.config.region, - dnsSuffix - ].join('.'); - } - endpoint.host = endpoint.hostname; - var encodedArn = AWS.util.uriEscape(req.params.Bucket); - var path = req.httpRequest.path; - //remove the Bucket value from path - req.httpRequest.path = path.replace(new RegExp('/' + encodedArn), ''); - if (req.httpRequest.path[0] !== '/') { - req.httpRequest.path = '/' + req.httpRequest.path; - } - req.httpRequest.region = accessPointArn.region; //region used to sign - }, - - /** - * Adds Expect: 100-continue header if payload is greater-or-equal 1MB - * @api private - */ - addExpect100Continue: function addExpect100Continue(req) { - var len = req.httpRequest.headers['Content-Length']; - if (AWS.util.isNode() && (len >= 1024 * 1024 || req.params.Body instanceof AWS.util.stream.Stream)) { - req.httpRequest.headers['Expect'] = '100-continue'; - } - }, - - /** - * Adds a default content type if none is supplied. - * - * @api private - */ - addContentType: function addContentType(req) { - var httpRequest = req.httpRequest; - if (httpRequest.method === 'GET' || httpRequest.method === 'HEAD') { - // Content-Type is not set in GET/HEAD requests - delete httpRequest.headers['Content-Type']; - return; - } - - if (!httpRequest.headers['Content-Type']) { // always have a Content-Type - httpRequest.headers['Content-Type'] = 'application/octet-stream'; - } - - var contentType = httpRequest.headers['Content-Type']; - if (AWS.util.isBrowser()) { - if (typeof httpRequest.body === 'string' && !contentType.match(/;\s*charset=/)) { - var charset = '; charset=UTF-8'; - httpRequest.headers['Content-Type'] += charset; - } else { - var replaceFn = function(_, prefix, charsetName) { - return prefix + charsetName.toUpperCase(); - }; - - httpRequest.headers['Content-Type'] = - contentType.replace(/(;\s*charset=)(.+)$/, replaceFn); - } - } - }, - - /** - * Checks whether checksums should be computed for the request if it's not - * already set by {AWS.EventListeners.Core.COMPUTE_CHECKSUM}. It depends on - * whether {AWS.Config.computeChecksums} is set. - * - * @param req [AWS.Request] the request to check against - * @return [Boolean] whether to compute checksums for a request. 
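The Content-MD5 value these checksum listeners end up setting is simply the base64-encoded MD5 digest of the request body. A minimal standalone sketch using Node's built-in crypto module (not the AWS.util.crypto helper shown in the diff), with a literal body chosen only for illustration:

    var crypto = require('crypto');

    // Base64-encoded MD5 of the request body, i.e. the value that ends up
    // in the Content-MD5 header when checksums are computed.
    function contentMd5(body) {
      return crypto.createHash('md5').update(body).digest('base64');
    }

    contentMd5('hello world'); // 'XrY7u+Ae7tCTyyK7j1rNww=='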
- * @api private - */ - willComputeChecksums: function willComputeChecksums(req) { - var rules = req.service.api.operations[req.operation].input.members; - var body = req.httpRequest.body; - var needsContentMD5 = req.service.config.computeChecksums && - rules.ContentMD5 && - !req.params.ContentMD5 && - body && - (AWS.util.Buffer.isBuffer(req.httpRequest.body) || typeof req.httpRequest.body === 'string'); - - // Sha256 signing disabled, and not a presigned url - if (needsContentMD5 && req.service.shouldDisableBodySigning(req) && !req.isPresigned()) { - return true; - } - - // SigV2 and presign, for backwards compatibility purpose. - if (needsContentMD5 && this.getSignatureVersion(req) === 's3' && req.isPresigned()) { - return true; - } - - return false; - }, - - /** - * A listener that computes the Content-MD5 and sets it in the header. - * This listener is to support S3-specific features like - * s3DisableBodySigning and SigV2 presign. Content MD5 logic for SigV4 is - * handled in AWS.EventListeners.Core.COMPUTE_CHECKSUM - * - * @api private - */ - computeContentMd5: function computeContentMd5(req) { - if (req.service.willComputeChecksums(req)) { - var md5 = AWS.util.crypto.md5(req.httpRequest.body, 'base64'); - req.httpRequest.headers['Content-MD5'] = md5; - } - }, - - /** - * @api private - */ - computeSseCustomerKeyMd5: function computeSseCustomerKeyMd5(req) { - var keys = { - SSECustomerKey: 'x-amz-server-side-encryption-customer-key-MD5', - CopySourceSSECustomerKey: 'x-amz-copy-source-server-side-encryption-customer-key-MD5' - }; - AWS.util.each(keys, function(key, header) { - if (req.params[key]) { - var value = AWS.util.crypto.md5(req.params[key], 'base64'); - req.httpRequest.headers[header] = value; - } - }); - }, - - /** - * Returns true if the bucket name should be left in the URI path for - * a request to S3. This function takes into account the current - * endpoint protocol (e.g. http or https). - * - * @api private - */ - pathStyleBucketName: function pathStyleBucketName(bucketName) { - // user can force path style requests via the configuration - if (this.config.s3ForcePathStyle) return true; - if (this.config.s3BucketEndpoint) return false; - - if (s3util.dnsCompatibleBucketName(bucketName)) { - return (this.config.sslEnabled && bucketName.match(/\./)) ? true : false; - } else { - return true; // names that are not dns compatible must always use path style - } - }, - - /** - * For COPY operations, the response can be an error even with status code 200. - * The SDK treats the response as an exception when the response body indicates - * an exception or the body is empty. - * - * @api private - */ - extractErrorFrom200Response: function extractErrorFrom200Response(resp) { - if (!operationsWith200StatusCodeError[resp.request.operation]) return; - var httpResponse = resp.httpResponse; - if (httpResponse.body && httpResponse.body.toString().match('<Error>')) { - // Response body with '<Error>' indicates an exception. - // Get S3 client object. In ManagedUpload, this.service refers to - // S3 client object. - resp.data = null; - var service = this.service ? this.service : this; - service.extractError(resp); - throw resp.error; - } else if (!httpResponse.body || !httpResponse.body.toString().match(/<[\w_]/)) { - // When body is empty or incomplete, S3 might stop the request on detecting client - // side aborting the request.
- resp.data = null; - throw AWS.util.error(new Error(), { - code: 'InternalError', - message: 'S3 aborted request' - }); - } - }, - - /** - * @return [Boolean] whether the error can be retried - * @api private - */ - retryableError: function retryableError(error, request) { - if (operationsWith200StatusCodeError[request.operation] && - error.statusCode === 200) { - return true; - } else if (request._requestRegionForBucket && - request.service.bucketRegionCache[request._requestRegionForBucket]) { - return false; - } else if (error && error.code === 'RequestTimeout') { - return true; - } else if (error && - regionRedirectErrorCodes.indexOf(error.code) != -1 && - error.region && error.region != request.httpRequest.region) { - request.httpRequest.region = error.region; - if (error.statusCode === 301) { - request.service.updateReqBucketRegion(request); - } - return true; - } else { - var _super = AWS.Service.prototype.retryableError; - return _super.call(this, error, request); - } - }, - - /** - * Updates httpRequest with region. If region is not provided, then - * the httpRequest will be updated based on httpRequest.region - * - * @api private - */ - updateReqBucketRegion: function updateReqBucketRegion(request, region) { - var httpRequest = request.httpRequest; - if (typeof region === 'string' && region.length) { - httpRequest.region = region; - } - if (!httpRequest.endpoint.host.match(/s3(?!-accelerate).*\.amazonaws\.com$/)) { - return; - } - var service = request.service; - var s3Config = service.config; - var s3BucketEndpoint = s3Config.s3BucketEndpoint; - if (s3BucketEndpoint) { - delete s3Config.s3BucketEndpoint; - } - var newConfig = AWS.util.copy(s3Config); - delete newConfig.endpoint; - newConfig.region = httpRequest.region; - - httpRequest.endpoint = (new AWS.S3(newConfig)).endpoint; - service.populateURI(request); - s3Config.s3BucketEndpoint = s3BucketEndpoint; - httpRequest.headers.Host = httpRequest.endpoint.host; - - if (request._asm.currentState === 'validate') { - request.removeListener('build', service.populateURI); - request.addListener('build', service.removeVirtualHostedBucketFromPath); - } - }, - - /** - * Provides a specialized parser for getBucketLocation -- all other - * operations are parsed by the super class. - * - * @api private - */ - extractData: function extractData(resp) { - var req = resp.request; - if (req.operation === 'getBucketLocation') { - var match = resp.httpResponse.body.toString().match(/>(.+)<\/Location/); - delete resp.data['_']; - if (match) { - resp.data.LocationConstraint = match[1]; - } else { - resp.data.LocationConstraint = ''; - } - } - var bucket = req.params.Bucket || null; - if (req.operation === 'deleteBucket' && typeof bucket === 'string' && !resp.error) { - req.service.clearBucketRegionCache(bucket); - } else { - var headers = resp.httpResponse.headers || {}; - var region = headers['x-amz-bucket-region'] || null; - if (!region && req.operation === 'createBucket' && !resp.error) { - var createBucketConfiguration = req.params.CreateBucketConfiguration; - if (!createBucketConfiguration) { - region = 'us-east-1'; - } else if (createBucketConfiguration.LocationConstraint === 'EU') { - region = 'eu-west-1'; - } else { - region = createBucketConfiguration.LocationConstraint; - } - } - if (region) { - if (bucket && region !== req.service.bucketRegionCache[bucket]) { - req.service.bucketRegionCache[bucket] = region; - } - } - } - req.service.extractRequestIds(resp); - }, - - /** - * Extracts an error object from the http response. 
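For the getBucketLocation special case above, the region is pulled straight out of the response XML with a regular expression rather than the generic parser. A minimal sketch with an illustrative body (XML declaration omitted); an empty LocationConstraint corresponds to the classic us-east-1 region:

    // Illustrative GetBucketLocation response body.
    var body = '<LocationConstraint xmlns="http://s3.amazonaws.com/doc/2006-03-01/">eu-west-1</LocationConstraint>';
    var match = body.match(/>(.+)<\/Location/);
    var locationConstraint = match ? match[1] : ''; // 'eu-west-1'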
- * - * @api private - */ - extractError: function extractError(resp) { - var codes = { - 304: 'NotModified', - 403: 'Forbidden', - 400: 'BadRequest', - 404: 'NotFound' - }; - - var req = resp.request; - var code = resp.httpResponse.statusCode; - var body = resp.httpResponse.body || ''; - - var headers = resp.httpResponse.headers || {}; - var region = headers['x-amz-bucket-region'] || null; - var bucket = req.params.Bucket || null; - var bucketRegionCache = req.service.bucketRegionCache; - if (region && bucket && region !== bucketRegionCache[bucket]) { - bucketRegionCache[bucket] = region; - } - - var cachedRegion; - if (codes[code] && body.length === 0) { - if (bucket && !region) { - cachedRegion = bucketRegionCache[bucket] || null; - if (cachedRegion !== req.httpRequest.region) { - region = cachedRegion; - } - } - resp.error = AWS.util.error(new Error(), { - code: codes[code], - message: null, - region: region - }); - } else { - var data = new AWS.XML.Parser().parse(body.toString()); - - if (data.Region && !region) { - region = data.Region; - if (bucket && region !== bucketRegionCache[bucket]) { - bucketRegionCache[bucket] = region; - } - } else if (bucket && !region && !data.Region) { - cachedRegion = bucketRegionCache[bucket] || null; - if (cachedRegion !== req.httpRequest.region) { - region = cachedRegion; - } - } - - resp.error = AWS.util.error(new Error(), { - code: data.Code || code, - message: data.Message || null, - region: region - }); - } - req.service.extractRequestIds(resp); - }, - - /** - * If region was not obtained synchronously, then send async request - * to get bucket region for errors resulting from wrong region. - * - * @api private - */ - requestBucketRegion: function requestBucketRegion(resp, done) { - var error = resp.error; - var req = resp.request; - var bucket = req.params.Bucket || null; - - if (!error || !bucket || error.region || req.operation === 'listObjects' || - (AWS.util.isNode() && req.operation === 'headBucket') || - (error.statusCode === 400 && req.operation !== 'headObject') || - regionRedirectErrorCodes.indexOf(error.code) === -1) { - return done(); - } - var reqOperation = AWS.util.isNode() ? 'headBucket' : 'listObjects'; - var reqParams = {Bucket: bucket}; - if (reqOperation === 'listObjects') reqParams.MaxKeys = 0; - var regionReq = req.service[reqOperation](reqParams); - regionReq._requestRegionForBucket = bucket; - regionReq.send(function() { - var region = req.service.bucketRegionCache[bucket] || null; - error.region = region; - done(); - }); - }, - - /** - * For browser only. If NetworkingError received, will attempt to obtain - * the bucket region. 
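The region discovery that requestBucketRegion performs boils down to sending an inexpensive request against the bucket and reading the x-amz-bucket-region response header, which S3 also returns on redirect and access-denied responses. A rough sketch outside the SDK internals; the client region and bucket name are placeholders:

    var AWS = require('aws-sdk');
    var s3 = new AWS.S3({ region: 'us-east-1' });

    // Read the bucket's actual region off the raw response headers.
    s3.headBucket({ Bucket: 'example-bucket' })
      .on('complete', function(resp) {
        var headers = resp.httpResponse.headers || {};
        console.log(headers['x-amz-bucket-region']); // e.g. 'eu-central-1'
      })
      .send();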
- * - * @api private - */ - reqRegionForNetworkingError: function reqRegionForNetworkingError(resp, done) { - if (!AWS.util.isBrowser()) { - return done(); - } - var error = resp.error; - var request = resp.request; - var bucket = request.params.Bucket; - if (!error || error.code !== 'NetworkingError' || !bucket || - request.httpRequest.region === 'us-east-1') { - return done(); - } - var service = request.service; - var bucketRegionCache = service.bucketRegionCache; - var cachedRegion = bucketRegionCache[bucket] || null; - - if (cachedRegion && cachedRegion !== request.httpRequest.region) { - service.updateReqBucketRegion(request, cachedRegion); - done(); - } else if (!s3util.dnsCompatibleBucketName(bucket)) { - service.updateReqBucketRegion(request, 'us-east-1'); - if (bucketRegionCache[bucket] !== 'us-east-1') { - bucketRegionCache[bucket] = 'us-east-1'; - } - done(); - } else if (request.httpRequest.virtualHostedBucket) { - var getRegionReq = service.listObjects({Bucket: bucket, MaxKeys: 0}); - service.updateReqBucketRegion(getRegionReq, 'us-east-1'); - getRegionReq._requestRegionForBucket = bucket; - - getRegionReq.send(function() { - var region = service.bucketRegionCache[bucket] || null; - if (region && region !== request.httpRequest.region) { - service.updateReqBucketRegion(request, region); - } - done(); - }); - } else { - // DNS-compatible path-style - // (s3ForcePathStyle or bucket name with dot over https) - // Cannot obtain region information for this case - done(); - } - }, - - /** - * Cache for bucket region. - * - * @api private - */ - bucketRegionCache: {}, - - /** - * Clears bucket region cache. - * - * @api private - */ - clearBucketRegionCache: function(buckets) { - var bucketRegionCache = this.bucketRegionCache; - if (!buckets) { - buckets = Object.keys(bucketRegionCache); - } else if (typeof buckets === 'string') { - buckets = [buckets]; - } - for (var i = 0; i < buckets.length; i++) { - delete bucketRegionCache[buckets[i]]; - } - return bucketRegionCache; - }, - - /** - * Corrects request region if bucket's cached region is different - * - * @api private - */ - correctBucketRegionFromCache: function correctBucketRegionFromCache(req) { - var bucket = req.params.Bucket || null; - if (bucket) { - var service = req.service; - var requestRegion = req.httpRequest.region; - var cachedRegion = service.bucketRegionCache[bucket]; - if (cachedRegion && cachedRegion !== requestRegion) { - service.updateReqBucketRegion(req, cachedRegion); - } - } - }, - - /** - * Extracts S3 specific request ids from the http response. - * - * @api private - */ - extractRequestIds: function extractRequestIds(resp) { - var extendedRequestId = resp.httpResponse.headers ? resp.httpResponse.headers['x-amz-id-2'] : null; - var cfId = resp.httpResponse.headers ? resp.httpResponse.headers['x-amz-cf-id'] : null; - resp.extendedRequestId = extendedRequestId; - resp.cfId = cfId; - - if (resp.error) { - resp.error.requestId = resp.requestId || null; - resp.error.extendedRequestId = extendedRequestId; - resp.error.cfId = cfId; - } - }, - - /** - * Get a pre-signed URL for a given operation name. - * - * @note You must ensure that you have static or previously resolved - * credentials if you call this method synchronously (with no callback), - * otherwise it may not properly sign the request. If you cannot guarantee - * this (you are using an asynchronous credential provider, i.e., EC2 - * IAM roles), you should always call this method with an asynchronous - * callback. 
- * @note Not all operation parameters are supported when using pre-signed - * URLs. Certain parameters, such as `SSECustomerKey`, `ACL`, `Expires`, - * `ContentLength`, or `Tagging` must be provided as headers when sending a - * request. If you are using pre-signed URLs to upload from a browser and - * need to use these fields, see {createPresignedPost}. - * @note The default signer allows altering the request by adding corresponding - * headers to set some parameters (e.g. Range) and these added parameters - * won't be signed. You must use signatureVersion v4 to to include these - * parameters in the signed portion of the URL and enforce exact matching - * between headers and signed params in the URL. - * @note This operation cannot be used with a promise. See note above regarding - * asynchronous credentials and use with a callback. - * @param operation [String] the name of the operation to call - * @param params [map] parameters to pass to the operation. See the given - * operation for the expected operation parameters. In addition, you can - * also pass the "Expires" parameter to inform S3 how long the URL should - * work for. - * @option params Expires [Integer] (900) the number of seconds to expire - * the pre-signed URL operation in. Defaults to 15 minutes. - * @param callback [Function] if a callback is provided, this function will - * pass the URL as the second parameter (after the error parameter) to - * the callback function. - * @return [String] if called synchronously (with no callback), returns the - * signed URL. - * @return [null] nothing is returned if a callback is provided. - * @example Pre-signing a getObject operation (synchronously) - * var params = {Bucket: 'bucket', Key: 'key'}; - * var url = s3.getSignedUrl('getObject', params); - * console.log('The URL is', url); - * @example Pre-signing a putObject (asynchronously) - * var params = {Bucket: 'bucket', Key: 'key'}; - * s3.getSignedUrl('putObject', params, function (err, url) { - * console.log('The URL is', url); - * }); - * @example Pre-signing a putObject operation with a specific payload - * var params = {Bucket: 'bucket', Key: 'key', Body: 'body'}; - * var url = s3.getSignedUrl('putObject', params); - * console.log('The URL is', url); - * @example Passing in a 1-minute expiry time for a pre-signed URL - * var params = {Bucket: 'bucket', Key: 'key', Expires: 60}; - * var url = s3.getSignedUrl('getObject', params); - * console.log('The URL is', url); // expires in 60 seconds - */ - getSignedUrl: function getSignedUrl(operation, params, callback) { - params = AWS.util.copy(params || {}); - var expires = params.Expires || 900; - - if (typeof expires !== 'number') { - throw AWS.util.error(new Error(), - { code: 'InvalidParameterException', message: 'The expiration must be a number, received ' + typeof expires }); - } - - delete params.Expires; // we can't validate this - var request = this.makeRequest(operation, params); - - if (callback) { - AWS.util.defer(function() { - request.presign(expires, callback); - }); - } else { - return request.presign(expires, callback); - } - }, - - /** - * @!method getSignedUrlPromise() - * Returns a 'thenable' promise that will be resolved with a pre-signed URL - * for a given operation name. - * - * Two callbacks can be provided to the `then` method on the returned promise. - * The first callback will be called if the promise is fulfilled, and the second - * callback will be called if the promise is rejected. 
- * @note Not all operation parameters are supported when using pre-signed - * URLs. Certain parameters, such as `SSECustomerKey`, `ACL`, `Expires`, - * `ContentLength`, or `Tagging` must be provided as headers when sending a - * request. If you are using pre-signed URLs to upload from a browser and - * need to use these fields, see {createPresignedPost}. - * @param operation [String] the name of the operation to call - * @param params [map] parameters to pass to the operation. See the given - * operation for the expected operation parameters. In addition, you can - * also pass the "Expires" parameter to inform S3 how long the URL should - * work for. - * @option params Expires [Integer] (900) the number of seconds to expire - * the pre-signed URL operation in. Defaults to 15 minutes. - * @callback fulfilledCallback function(url) - * Called if the promise is fulfilled. - * @param url [String] the signed url - * @callback rejectedCallback function(err) - * Called if the promise is rejected. - * @param err [Error] if an error occurred, this value will be filled - * @return [Promise] A promise that represents the state of the `refresh` call. - * @example Pre-signing a getObject operation - * var params = {Bucket: 'bucket', Key: 'key'}; - * var promise = s3.getSignedUrlPromise('getObject', params); - * promise.then(function(url) { - * console.log('The URL is', url); - * }, function(err) { ... }); - * @example Pre-signing a putObject operation with a specific payload - * var params = {Bucket: 'bucket', Key: 'key', Body: 'body'}; - * var promise = s3.getSignedUrlPromise('putObject', params); - * promise.then(function(url) { - * console.log('The URL is', url); - * }, function(err) { ... }); - * @example Passing in a 1-minute expiry time for a pre-signed URL - * var params = {Bucket: 'bucket', Key: 'key', Expires: 60}; - * var promise = s3.getSignedUrlPromise('getObject', params); - * promise.then(function(url) { - * console.log('The URL is', url); - * }, function(err) { ... }); - */ - - /** - * Get a pre-signed POST policy to support uploading to S3 directly from an - * HTML form. - * - * @param params [map] - * @option params Bucket [String] The bucket to which the post should be - * uploaded - * @option params Expires [Integer] (3600) The number of seconds for which - * the presigned policy should be valid. - * @option params Conditions [Array] An array of conditions that must be met - * for the presigned policy to allow the - * upload. This can include required tags, - * the accepted range for content lengths, - * etc. - * @see http://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-HTTPPOSTConstructPolicy.html - * @option params Fields [map] Fields to include in the form. All - * values passed in as fields will be - * signed as exact match conditions. - * @param callback [Function] - * - * @note All fields passed in when creating presigned post data will be signed - * as exact match conditions. Any fields that will be interpolated by S3 - * must be added to the fields hash after signing, and an appropriate - * condition for such fields must be explicitly added to the Conditions - * array passed to this function before signing. 
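One Conditions entry the examples that follow do not show is content-length-range, which bounds the size of the uploaded object; the bucket, expiry, and limits below are placeholders for illustration:

    var params = {
      Bucket: 'bucket',
      Expires: 300,
      Conditions: [
        ['content-length-range', 0, 10 * 1024 * 1024], // 0 bytes up to 10 MB
        ['starts-with', '$key', 'uploads/']
      ]
    };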
- * - * @example Presigning post data with a known key - * var params = { - * Bucket: 'bucket', - * Fields: { - * key: 'key' - * } - * }; - * s3.createPresignedPost(params, function(err, data) { - * if (err) { - * console.error('Presigning post data encountered an error', err); - * } else { - * console.log('The post data is', data); - * } - * }); - * - * @example Presigning post data with an interpolated key - * var params = { - * Bucket: 'bucket', - * Conditions: [ - * ['starts-with', '$key', 'path/to/uploads/'] - * ] - * }; - * s3.createPresignedPost(params, function(err, data) { - * if (err) { - * console.error('Presigning post data encountered an error', err); - * } else { - * data.Fields.key = 'path/to/uploads/${filename}'; - * console.log('The post data is', data); - * } - * }); - * - * @note You must ensure that you have static or previously resolved - * credentials if you call this method synchronously (with no callback), - * otherwise it may not properly sign the request. If you cannot guarantee - * this (you are using an asynchronous credential provider, i.e., EC2 - * IAM roles), you should always call this method with an asynchronous - * callback. - * - * @return [map] If called synchronously (with no callback), returns a hash - * with the url to set as the form action and a hash of fields - * to include in the form. - * @return [null] Nothing is returned if a callback is provided. - * - * @callback callback function (err, data) - * @param err [Error] the error object returned from the policy signer - * @param data [map] The data necessary to construct an HTML form - * @param data.url [String] The URL to use as the action of the form - * @param data.fields [map] A hash of fields that must be included in the - * form for the upload to succeed. This hash will - * include the signed POST policy, your access key - * ID and security token (if present), etc. These - * may be safely included as input elements of type - * 'hidden.'
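To show how the returned url and fields are typically consumed in a browser, here is a rough sketch; FormData, fetch, the file input element, and the client options are assumptions rather than part of the SDK, and the file part must be appended after the signed fields:

    // Assumes the SDK browser build (AWS global) is already loaded.
    var s3 = new AWS.S3({ region: 'us-east-1' });
    var fileInput = document.querySelector('input[type="file"]');

    s3.createPresignedPost({ Bucket: 'bucket', Fields: { key: 'key' } }, function(err, data) {
      if (err) return console.error(err);
      var form = new FormData();
      Object.keys(data.fields).forEach(function(name) {
        form.append(name, data.fields[name]); // signed fields, copied verbatim
      });
      form.append('file', fileInput.files[0]); // file part goes last
      fetch(data.url, { method: 'POST', body: form });
    });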
- */ - createPresignedPost: function createPresignedPost(params, callback) { - if (typeof params === 'function' && callback === undefined) { - callback = params; - params = null; - } - - params = AWS.util.copy(params || {}); - var boundParams = this.config.params || {}; - var bucket = params.Bucket || boundParams.Bucket, - self = this, - config = this.config, - endpoint = AWS.util.copy(this.endpoint); - if (!config.s3BucketEndpoint) { - endpoint.pathname = '/' + bucket; - } - - function finalizePost() { - return { - url: AWS.util.urlFormat(endpoint), - fields: self.preparePostFields( - config.credentials, - config.region, - bucket, - params.Fields, - params.Conditions, - params.Expires - ) - }; - } - - if (callback) { - config.getCredentials(function (err) { - if (err) { - callback(err); - } else { - try { - callback(null, finalizePost()); - } catch (err) { - callback(err); - } - } - }); - } else { - return finalizePost(); - } - }, - - /** - * @api private - */ - preparePostFields: function preparePostFields( - credentials, - region, - bucket, - fields, - conditions, - expiresInSeconds - ) { - var now = this.getSkewCorrectedDate(); - if (!credentials || !region || !bucket) { - throw new Error('Unable to create a POST object policy without a bucket,' - + ' region, and credentials'); - } - fields = AWS.util.copy(fields || {}); - conditions = (conditions || []).slice(0); - expiresInSeconds = expiresInSeconds || 3600; - - var signingDate = AWS.util.date.iso8601(now).replace(/[:\-]|\.\d{3}/g, ''); - var shortDate = signingDate.substr(0, 8); - var scope = v4Credentials.createScope(shortDate, region, 's3'); - var credential = credentials.accessKeyId + '/' + scope; - - fields['bucket'] = bucket; - fields['X-Amz-Algorithm'] = 'AWS4-HMAC-SHA256'; - fields['X-Amz-Credential'] = credential; - fields['X-Amz-Date'] = signingDate; - if (credentials.sessionToken) { - fields['X-Amz-Security-Token'] = credentials.sessionToken; - } - for (var field in fields) { - if (fields.hasOwnProperty(field)) { - var condition = {}; - condition[field] = fields[field]; - conditions.push(condition); - } - } - - fields.Policy = this.preparePostPolicy( - new Date(now.valueOf() + expiresInSeconds * 1000), - conditions - ); - fields['X-Amz-Signature'] = AWS.util.crypto.hmac( - v4Credentials.getSigningKey(credentials, shortDate, region, 's3', true), - fields.Policy, - 'hex' - ); - - return fields; - }, - - /** - * @api private - */ - preparePostPolicy: function preparePostPolicy(expiration, conditions) { - return AWS.util.base64.encode(JSON.stringify({ - expiration: AWS.util.date.iso8601(expiration), - conditions: conditions - })); - }, - - /** - * @api private - */ - prepareSignedUrl: function prepareSignedUrl(request) { - request.addListener('validate', request.service.noPresignedContentLength); - request.removeListener('build', request.service.addContentType); - if (!request.params.Body) { - // no Content-MD5/SHA-256 if body is not provided - request.removeListener('build', request.service.computeContentMd5); - } else { - request.addListener('afterBuild', AWS.EventListeners.Core.COMPUTE_SHA256); - } - }, - - /** - * @api private - * @param request - */ - disableBodySigning: function disableBodySigning(request) { - var headers = request.httpRequest.headers; - // Add the header to anything that isn't a presigned url, unless that presigned url had a body defined - if (!Object.prototype.hasOwnProperty.call(headers, 'presigned-expires')) { - headers['X-Amz-Content-Sha256'] = 'UNSIGNED-PAYLOAD'; - } - }, - - /** - * @api private - 
*/ - noPresignedContentLength: function noPresignedContentLength(request) { - if (request.params.ContentLength !== undefined) { - throw AWS.util.error(new Error(), {code: 'UnexpectedParameter', - message: 'ContentLength is not supported in pre-signed URLs.'}); - } - }, - - createBucket: function createBucket(params, callback) { - // When creating a bucket *outside* the classic region, the location - // constraint must be set for the bucket and it must match the endpoint. - // This chunk of code will set the location constraint param based - // on the region (when possible), but it will not override a passed-in - // location constraint. - if (typeof params === 'function' || !params) { - callback = callback || params; - params = {}; - } - var hostname = this.endpoint.hostname; - // copy params so that appending keys does not unintentionally - // mutate params object argument passed in by user - var copiedParams = AWS.util.copy(params); - - if (hostname !== this.api.globalEndpoint && !params.CreateBucketConfiguration) { - copiedParams.CreateBucketConfiguration = { LocationConstraint: this.config.region }; - } - return this.makeRequest('createBucket', copiedParams, callback); - }, - - writeGetObjectResponse: function writeGetObjectResponse(params, callback) { - - var request = this.makeRequest('writeGetObjectResponse', AWS.util.copy(params), callback); - var hostname = this.endpoint.hostname; - if (hostname.indexOf(this.config.region) !== -1) { - // hostname specifies a region already - hostname = hostname.replace('s3.', OBJECT_LAMBDA_SERVICE + '.'); - } else { - // Hostname doesn't have a region. - // Object Lambda requires an explicit region. - hostname = hostname.replace('s3.', OBJECT_LAMBDA_SERVICE + '.' + this.config.region + '.'); - } - - request.httpRequest.endpoint = new AWS.Endpoint(hostname, this.config); - return request; - }, - - /** - * @see AWS.S3.ManagedUpload - * @overload upload(params = {}, [options], [callback]) - * Uploads an arbitrarily sized buffer, blob, or stream, using intelligent - * concurrent handling of parts if the payload is large enough. You can - * configure the concurrent queue size by setting `options`. Note that this - * is the only operation for which the SDK can retry requests with stream - * bodies. - * - * @param (see AWS.S3.putObject) - * @option (see AWS.S3.ManagedUpload.constructor) - * @return [AWS.S3.ManagedUpload] the managed upload object that can call - * `send()` or track progress. - * @example Uploading a stream object - * var params = {Bucket: 'bucket', Key: 'key', Body: stream}; - * s3.upload(params, function(err, data) { - * console.log(err, data); - * }); - * @example Uploading a stream with concurrency of 1 and partSize of 10mb - * var params = {Bucket: 'bucket', Key: 'key', Body: stream}; - * var options = {partSize: 10 * 1024 * 1024, queueSize: 1}; - * s3.upload(params, options, function(err, data) { - * console.log(err, data); - * }); - * @callback callback function(err, data) - * @param err [Error] an error or null if no error occurred.
- * @param data [map] The response data from the successful upload: - * @param data.Location [String] the URL of the uploaded object - * @param data.ETag [String] the ETag of the uploaded object - * @param data.Bucket [String] the bucket to which the object was uploaded - * @param data.Key [String] the key to which the object was uploaded - */ - upload: function upload(params, options, callback) { - if (typeof options === 'function' && callback === undefined) { - callback = options; - options = null; - } - - options = options || {}; - options = AWS.util.merge(options || {}, {service: this, params: params}); - - var uploader = new AWS.S3.ManagedUpload(options); - if (typeof callback === 'function') uploader.send(callback); - return uploader; - } -}); - -/** - * @api private - */ -AWS.S3.addPromisesToClass = function addPromisesToClass(PromiseDependency) { - this.prototype.getSignedUrlPromise = AWS.util.promisifyMethod('getSignedUrl', PromiseDependency); -}; - -/** - * @api private - */ -AWS.S3.deletePromisesFromClass = function deletePromisesFromClass() { - delete this.prototype.getSignedUrlPromise; -}; - -AWS.util.addPromises(AWS.S3); - - -/***/ }), - -/***/ 71207: -/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { - -var AWS = __nccwpck_require__(28437); -var s3util = __nccwpck_require__(35895); -var regionUtil = __nccwpck_require__(18262); - -AWS.util.update(AWS.S3Control.prototype, { - /** - * @api private - */ - setupRequestListeners: function setupRequestListeners(request) { - request.addListener('extractError', this.extractHostId); - request.addListener('extractData', this.extractHostId); - request.addListener('validate', this.validateAccountId); - - var isArnInBucket = s3util.isArnInParam(request, 'Bucket'); - var isArnInName = s3util.isArnInParam(request, 'Name'); - - if (isArnInBucket) { - request._parsedArn = AWS.util.ARN.parse(request.params['Bucket']); - request.addListener('validate', this.validateOutpostsBucketArn); - request.addListener('validate', s3util.validateOutpostsArn); - request.addListener('afterBuild', this.addOutpostIdHeader); - } else if (isArnInName) { - request._parsedArn = AWS.util.ARN.parse(request.params['Name']); - request.addListener('validate', s3util.validateOutpostsAccessPointArn); - request.addListener('validate', s3util.validateOutpostsArn); - request.addListener('afterBuild', this.addOutpostIdHeader); - } - - if (isArnInBucket || isArnInName) { - request.addListener('validate', this.validateArnRegion); - request.addListener('validate', this.validateArnAccountWithParams, true); - request.addListener('validate', s3util.validateArnAccount); - request.addListener('validate', s3util.validateArnService); - request.addListener('build', this.populateParamFromArn, true); - request.addListener('build', this.populateUriFromArn); - request.addListener('build', s3util.validatePopulateUriFromArn); - } - - if (request.params.OutpostId && - (request.operation === 'createBucket' || - request.operation === 'listRegionalBuckets')) { - request.addListener('build', this.populateEndpointForOutpostId); - } - }, - - /** - * Adds outpostId header - */ - addOutpostIdHeader: function addOutpostIdHeader(req) { - req.httpRequest.headers['x-amz-outpost-id'] = req._parsedArn.outpostId; - }, - - /** - * Validate Outposts ARN supplied in Bucket parameter is a valid bucket name - */ - validateOutpostsBucketArn: function validateOutpostsBucketArn(req) { - var parsedArn = req._parsedArn; - - //can be ':' or '/' - var delimiter = 
parsedArn.resource['outpost'.length]; - - if (parsedArn.resource.split(delimiter).length !== 4) { - throw AWS.util.error(new Error(), { - code: 'InvalidARN', - message: 'Bucket ARN should have two resources outpost/{outpostId}/bucket/{accesspointName}' - }); - } - - var bucket = parsedArn.resource.split(delimiter)[3]; - if (!s3util.dnsCompatibleBucketName(bucket) || bucket.match(/\./)) { - throw AWS.util.error(new Error(), { - code: 'InvalidARN', - message: 'Bucket ARN is not DNS compatible. Got ' + bucket - }); - } - - //set parsed valid bucket - req._parsedArn.bucket = bucket; - }, - - /** - * @api private - */ - populateParamFromArn: function populateParamFromArn(req) { - var parsedArn = req._parsedArn; - if (s3util.isArnInParam(req, 'Bucket')) { - req.params.Bucket = parsedArn.bucket; - } else if (s3util.isArnInParam(req, 'Name')) { - req.params.Name = parsedArn.accessPoint; - } - }, - - /** - * Populate URI according to the ARN - */ - populateUriFromArn: function populateUriFromArn(req) { - var parsedArn = req._parsedArn; - - var endpoint = req.httpRequest.endpoint; - var useArnRegion = req.service.config.s3UseArnRegion; - var useFipsEndpoint = req.service.config.useFipsEndpoint; - - endpoint.hostname = [ - 's3-outposts' + (useFipsEndpoint ? '-fips': ''), - useArnRegion ? parsedArn.region : req.service.config.region, - 'amazonaws.com' - ].join('.'); - endpoint.host = endpoint.hostname; - }, - - /** - * @api private - */ - populateEndpointForOutpostId: function populateEndpointForOutpostId(req) { - var endpoint = req.httpRequest.endpoint; - var useFipsEndpoint = req.service.config.useFipsEndpoint; - endpoint.hostname = [ - 's3-outposts' + (useFipsEndpoint ? '-fips': ''), - req.service.config.region, - 'amazonaws.com' - ].join('.'); - endpoint.host = endpoint.hostname; - }, - - /** - * @api private - */ - extractHostId: function(response) { - var hostId = response.httpResponse.headers ? 
response.httpResponse.headers['x-amz-id-2'] : null; - response.extendedRequestId = hostId; - if (response.error) { - response.error.extendedRequestId = hostId; - } - }, - - /** - * @api private - */ - validateArnRegion: function validateArnRegion(req) { - s3util.validateArnRegion(req, { allowFipsEndpoint: true }); - }, - - /** - * @api private - */ - validateArnAccountWithParams: function validateArnAccountWithParams(req) { - var params = req.params; - var inputModel = req.service.api.operations[req.operation].input; - if (inputModel.members.AccountId) { - var parsedArn = req._parsedArn; - if (parsedArn.accountId) { - if (params.AccountId) { - if (params.AccountId !== parsedArn.accountId) { - throw AWS.util.error( - new Error(), - {code: 'ValidationError', message: 'AccountId in ARN and request params should be same.'} - ); - } - } else { - // Store accountId from ARN in params - params.AccountId = parsedArn.accountId; - } - } - } - }, - - /** - * @api private - */ - validateAccountId: function(request) { - var params = request.params; - if (!Object.prototype.hasOwnProperty.call(params, 'AccountId')) return; - var accountId = params.AccountId; - //validate type - if (typeof accountId !== 'string') { - throw AWS.util.error( - new Error(), - {code: 'ValidationError', message: 'AccountId must be a string.'} - ); - } - //validate length - if (accountId.length < 1 || accountId.length > 63) { - throw AWS.util.error( - new Error(), - {code: 'ValidationError', message: 'AccountId length should be between 1 to 63 characters, inclusive.'} - ); - } - //validate pattern - var hostPattern = /^[a-zA-Z0-9]{1}$|^[a-zA-Z0-9][a-zA-Z0-9\-]*[a-zA-Z0-9]$/; - if (!hostPattern.test(accountId)) { - throw AWS.util.error(new Error(), - {code: 'ValidationError', message: 'AccountId should be hostname compatible. 
AccountId: ' + accountId}); - } - }, - - /** - * @api private - */ - getSigningName: function getSigningName(req) { - var _super = AWS.Service.prototype.getSigningName; - if (req && req._parsedArn && req._parsedArn.service) { - return req._parsedArn.service; - } else if (req.params.OutpostId && - (req.operation === 'createBucket' || - req.operation === 'listRegionalBuckets')) { - return 's3-outposts'; - } else { - return _super.call(this, req); - } - }, -}); - - -/***/ }), - -/***/ 35895: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var AWS = __nccwpck_require__(28437); -var regionUtil = __nccwpck_require__(18262); - -var s3util = { - /** - * @api private - */ - isArnInParam: function isArnInParam(req, paramName) { - var inputShape = (req.service.api.operations[req.operation] || {}).input || {}; - var inputMembers = inputShape.members || {}; - if (!req.params[paramName] || !inputMembers[paramName]) return false; - return AWS.util.ARN.validate(req.params[paramName]); - }, - - /** - * Validate service component from ARN supplied in Bucket parameter - */ - validateArnService: function validateArnService(req) { - var parsedArn = req._parsedArn; - - if (parsedArn.service !== 's3' - && parsedArn.service !== 's3-outposts' - && parsedArn.service !== 's3-object-lambda') { - throw AWS.util.error(new Error(), { - code: 'InvalidARN', - message: 'expect \'s3\' or \'s3-outposts\' or \'s3-object-lambda\' in ARN service component' - }); - } - }, - - /** - * Validate account ID from ARN supplied in Bucket parameter is a valid account - */ - validateArnAccount: function validateArnAccount(req) { - var parsedArn = req._parsedArn; - - if (!/[0-9]{12}/.exec(parsedArn.accountId)) { - throw AWS.util.error(new Error(), { - code: 'InvalidARN', - message: 'ARN accountID does not match regex "[0-9]{12}"' - }); - } - }, - - /** - * Validate ARN supplied in Bucket parameter is a valid access point ARN - */ - validateS3AccessPointArn: function validateS3AccessPointArn(req) { - var parsedArn = req._parsedArn; - - //can be ':' or '/' - var delimiter = parsedArn.resource['accesspoint'.length]; - - if (parsedArn.resource.split(delimiter).length !== 2) { - throw AWS.util.error(new Error(), { - code: 'InvalidARN', - message: 'Access Point ARN should have one resource accesspoint/{accesspointName}' - }); - } - - var accessPoint = parsedArn.resource.split(delimiter)[1]; - var accessPointPrefix = accessPoint + '-' + parsedArn.accountId; - if (!s3util.dnsCompatibleBucketName(accessPointPrefix) || accessPointPrefix.match(/\./)) { - throw AWS.util.error(new Error(), { - code: 'InvalidARN', - message: 'Access point resource in ARN is not DNS compatible. 
Got ' + accessPoint - }); - } - - //set parsed valid access point - req._parsedArn.accessPoint = accessPoint; - }, - - /** - * Validate Outposts ARN supplied in Bucket parameter is a valid outposts ARN - */ - validateOutpostsArn: function validateOutpostsArn(req) { - var parsedArn = req._parsedArn; - - if ( - parsedArn.resource.indexOf('outpost:') !== 0 && - parsedArn.resource.indexOf('outpost/') !== 0 - ) { - throw AWS.util.error(new Error(), { - code: 'InvalidARN', - message: 'ARN resource should begin with \'outpost/\'' - }); - } - - //can be ':' or '/' - var delimiter = parsedArn.resource['outpost'.length]; - var outpostId = parsedArn.resource.split(delimiter)[1]; - var dnsHostRegex = new RegExp(/^([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9-]{0,61}[a-zA-Z0-9])$/); - if (!dnsHostRegex.test(outpostId)) { - throw AWS.util.error(new Error(), { - code: 'InvalidARN', - message: 'Outpost resource in ARN is not DNS compatible. Got ' + outpostId - }); - } - req._parsedArn.outpostId = outpostId; - }, - - /** - * Validate Outposts ARN supplied in Bucket parameter is a valid outposts ARN - */ - validateOutpostsAccessPointArn: function validateOutpostsAccessPointArn(req) { - var parsedArn = req._parsedArn; - - //can be ':' or '/' - var delimiter = parsedArn.resource['outpost'.length]; - - if (parsedArn.resource.split(delimiter).length !== 4) { - throw AWS.util.error(new Error(), { - code: 'InvalidARN', - message: 'Outposts ARN should have two resources outpost/{outpostId}/accesspoint/{accesspointName}' - }); - } - - var accessPoint = parsedArn.resource.split(delimiter)[3]; - var accessPointPrefix = accessPoint + '-' + parsedArn.accountId; - if (!s3util.dnsCompatibleBucketName(accessPointPrefix) || accessPointPrefix.match(/\./)) { - throw AWS.util.error(new Error(), { - code: 'InvalidARN', - message: 'Access point resource in ARN is not DNS compatible. Got ' + accessPoint - }); - } - - //set parsed valid access point - req._parsedArn.accessPoint = accessPoint; - }, - - /** - * Validate region field in ARN supplied in Bucket parameter is a valid region - */ - validateArnRegion: function validateArnRegion(req, options) { - if (options === undefined) { - options = {}; - } - - var useArnRegion = s3util.loadUseArnRegionConfig(req); - var regionFromArn = req._parsedArn.region; - var clientRegion = req.service.config.region; - var useFipsEndpoint = req.service.config.useFipsEndpoint; - var allowFipsEndpoint = options.allowFipsEndpoint || false; - - if (!regionFromArn) { - var message = 'ARN region is empty'; - if (req._parsedArn.service === 's3') { - message = message + '\nYou may want to use multi-regional ARN. The feature is not supported in current SDK. 
' + - 'You should consider switching to V3(https://github.com/aws/aws-sdk-js-v3).'; - } - throw AWS.util.error(new Error(), { - code: 'InvalidARN', - message: message - }); - } - - if (useFipsEndpoint && !allowFipsEndpoint) { - throw AWS.util.error(new Error(), { - code: 'InvalidConfiguration', - message: 'ARN endpoint is not compatible with FIPS region' - }); - } - - if (regionFromArn.indexOf('fips') >= 0) { - throw AWS.util.error(new Error(), { - code: 'InvalidConfiguration', - message: 'FIPS region not allowed in ARN' - }); - } - - if (!useArnRegion && regionFromArn !== clientRegion) { - throw AWS.util.error(new Error(), { - code: 'InvalidConfiguration', - message: 'Configured region conflicts with access point region' - }); - } else if ( - useArnRegion && - regionUtil.getEndpointSuffix(regionFromArn) !== regionUtil.getEndpointSuffix(clientRegion) - ) { - throw AWS.util.error(new Error(), { - code: 'InvalidConfiguration', - message: 'Configured region and access point region not in same partition' - }); - } - - if (req.service.config.useAccelerateEndpoint) { - throw AWS.util.error(new Error(), { - code: 'InvalidConfiguration', - message: 'useAccelerateEndpoint config is not supported with access point ARN' - }); - } - - if (req._parsedArn.service === 's3-outposts' && req.service.config.useDualstackEndpoint) { - throw AWS.util.error(new Error(), { - code: 'InvalidConfiguration', - message: 'Dualstack is not supported with outposts access point ARN' - }); - } - }, - - loadUseArnRegionConfig: function loadUseArnRegionConfig(req) { - var envName = 'AWS_S3_USE_ARN_REGION'; - var configName = 's3_use_arn_region'; - var useArnRegion = true; - var originalConfig = req.service._originalConfig || {}; - if (req.service.config.s3UseArnRegion !== undefined) { - return req.service.config.s3UseArnRegion; - } else if (originalConfig.s3UseArnRegion !== undefined) { - useArnRegion = originalConfig.s3UseArnRegion === true; - } else if (AWS.util.isNode()) { - //load from environmental variable AWS_USE_ARN_REGION - if (process.env[envName]) { - var value = process.env[envName].trim().toLowerCase(); - if (['false', 'true'].indexOf(value) < 0) { - throw AWS.util.error(new Error(), { - code: 'InvalidConfiguration', - message: envName + ' only accepts true or false. Got ' + process.env[envName], - retryable: false - }); - } - useArnRegion = value === 'true'; - } else { //load from shared config property use_arn_region - var profiles = {}; - var profile = {}; - try { - profiles = AWS.util.getProfilesFromSharedConfig(AWS.util.iniLoader); - profile = profiles[process.env.AWS_PROFILE || AWS.util.defaultProfile]; - } catch (e) {} - if (profile[configName]) { - if (['false', 'true'].indexOf(profile[configName].trim().toLowerCase()) < 0) { - throw AWS.util.error(new Error(), { - code: 'InvalidConfiguration', - message: configName + ' only accepts true or false. 
Got ' + profile[configName], - retryable: false - }); - } - useArnRegion = profile[configName].trim().toLowerCase() === 'true'; - } - } - } - req.service.config.s3UseArnRegion = useArnRegion; - return useArnRegion; - }, - - /** - * Validations before URI can be populated - */ - validatePopulateUriFromArn: function validatePopulateUriFromArn(req) { - if (req.service._originalConfig && req.service._originalConfig.endpoint) { - throw AWS.util.error(new Error(), { - code: 'InvalidConfiguration', - message: 'Custom endpoint is not compatible with access point ARN' - }); - } - - if (req.service.config.s3ForcePathStyle) { - throw AWS.util.error(new Error(), { - code: 'InvalidConfiguration', - message: 'Cannot construct path-style endpoint with access point' - }); - } - }, - - /** - * Returns true if the bucket name is DNS compatible. Buckets created - * outside of the classic region MUST be DNS compatible. - * - * @api private - */ - dnsCompatibleBucketName: function dnsCompatibleBucketName(bucketName) { - var b = bucketName; - var domain = new RegExp(/^[a-z0-9][a-z0-9\.\-]{1,61}[a-z0-9]$/); - var ipAddress = new RegExp(/(\d+\.){3}\d+/); - var dots = new RegExp(/\.\./); - return (b.match(domain) && !b.match(ipAddress) && !b.match(dots)) ? true : false; - }, -}; - -/** - * @api private - */ -module.exports = s3util; - - -/***/ }), - -/***/ 94571: -/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { - -var AWS = __nccwpck_require__(28437); - -AWS.util.update(AWS.SQS.prototype, { - /** - * @api private - */ - setupRequestListeners: function setupRequestListeners(request) { - request.addListener('build', this.buildEndpoint); - - if (request.service.config.computeChecksums) { - if (request.operation === 'sendMessage') { - request.addListener('extractData', this.verifySendMessageChecksum); - } else if (request.operation === 'sendMessageBatch') { - request.addListener('extractData', this.verifySendMessageBatchChecksum); - } else if (request.operation === 'receiveMessage') { - request.addListener('extractData', this.verifyReceiveMessageChecksum); - } - } - }, - - /** - * @api private - */ - verifySendMessageChecksum: function verifySendMessageChecksum(response) { - if (!response.data) return; - - var md5 = response.data.MD5OfMessageBody; - var body = this.params.MessageBody; - var calculatedMd5 = this.service.calculateChecksum(body); - if (calculatedMd5 !== md5) { - var msg = 'Got "' + response.data.MD5OfMessageBody + - '", expecting "' + calculatedMd5 + '".'; - this.service.throwInvalidChecksumError(response, - [response.data.MessageId], msg); - } - }, - - /** - * @api private - */ - verifySendMessageBatchChecksum: function verifySendMessageBatchChecksum(response) { - if (!response.data) return; - - var service = this.service; - var entries = {}; - var errors = []; - var messageIds = []; - AWS.util.arrayEach(response.data.Successful, function (entry) { - entries[entry.Id] = entry; - }); - AWS.util.arrayEach(this.params.Entries, function (entry) { - if (entries[entry.Id]) { - var md5 = entries[entry.Id].MD5OfMessageBody; - var body = entry.MessageBody; - if (!service.isChecksumValid(md5, body)) { - errors.push(entry.Id); - messageIds.push(entries[entry.Id].MessageId); - } - } - }); - - if (errors.length > 0) { - service.throwInvalidChecksumError(response, messageIds, - 'Invalid messages: ' + errors.join(', ')); - } - }, - - /** - * @api private - */ - verifyReceiveMessageChecksum: function verifyReceiveMessageChecksum(response) { - if (!response.data) return; - - var 
service = this.service; - var messageIds = []; - AWS.util.arrayEach(response.data.Messages, function(message) { - var md5 = message.MD5OfBody; - var body = message.Body; - if (!service.isChecksumValid(md5, body)) { - messageIds.push(message.MessageId); - } - }); - - if (messageIds.length > 0) { - service.throwInvalidChecksumError(response, messageIds, - 'Invalid messages: ' + messageIds.join(', ')); - } - }, - - /** - * @api private - */ - throwInvalidChecksumError: function throwInvalidChecksumError(response, ids, message) { - response.error = AWS.util.error(new Error(), { - retryable: true, - code: 'InvalidChecksum', - messageIds: ids, - message: response.request.operation + - ' returned an invalid MD5 response. ' + message - }); - }, - - /** - * @api private - */ - isChecksumValid: function isChecksumValid(checksum, data) { - return this.calculateChecksum(data) === checksum; - }, - - /** - * @api private - */ - calculateChecksum: function calculateChecksum(data) { - return AWS.util.crypto.md5(data, 'hex'); - }, - - /** - * @api private - */ - buildEndpoint: function buildEndpoint(request) { - var url = request.httpRequest.params.QueueUrl; - if (url) { - request.httpRequest.endpoint = new AWS.Endpoint(url); - - // signature version 4 requires the region name to be set, - // sqs queue urls contain the region name - var matches = request.httpRequest.endpoint.host.match(/^sqs\.(.+?)\./); - if (matches) request.httpRequest.region = matches[1]; - } - } -}); - - -/***/ }), - -/***/ 91055: -/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { - -var AWS = __nccwpck_require__(28437); -var resolveRegionalEndpointsFlag = __nccwpck_require__(85566); -var ENV_REGIONAL_ENDPOINT_ENABLED = 'AWS_STS_REGIONAL_ENDPOINTS'; -var CONFIG_REGIONAL_ENDPOINT_ENABLED = 'sts_regional_endpoints'; - -AWS.util.update(AWS.STS.prototype, { - /** - * @overload credentialsFrom(data, credentials = null) - * Creates a credentials object from STS response data containing - * credentials information. Useful for quickly setting AWS credentials. - * - * @note This is a low-level utility function. If you want to load temporary - * credentials into your process for subsequent requests to AWS resources, - * you should use {AWS.TemporaryCredentials} instead. - * @param data [map] data retrieved from a call to {getFederatedToken}, - * {getSessionToken}, {assumeRole}, or {assumeRoleWithWebIdentity}. - * @param credentials [AWS.Credentials] an optional credentials object to - * fill instead of creating a new object. Useful when modifying an - * existing credentials object from a refresh call. - * @return [AWS.TemporaryCredentials] the set of temporary credentials - * loaded from a raw STS operation response. 
- * @example Using credentialsFrom to load global AWS credentials - * var sts = new AWS.STS(); - * sts.getSessionToken(function (err, data) { - * if (err) console.log("Error getting credentials"); - * else { - * AWS.config.credentials = sts.credentialsFrom(data); - * } - * }); - * @see AWS.TemporaryCredentials - */ - credentialsFrom: function credentialsFrom(data, credentials) { - if (!data) return null; - if (!credentials) credentials = new AWS.TemporaryCredentials(); - credentials.expired = false; - credentials.accessKeyId = data.Credentials.AccessKeyId; - credentials.secretAccessKey = data.Credentials.SecretAccessKey; - credentials.sessionToken = data.Credentials.SessionToken; - credentials.expireTime = data.Credentials.Expiration; - return credentials; - }, - - assumeRoleWithWebIdentity: function assumeRoleWithWebIdentity(params, callback) { - return this.makeUnauthenticatedRequest('assumeRoleWithWebIdentity', params, callback); - }, - - assumeRoleWithSAML: function assumeRoleWithSAML(params, callback) { - return this.makeUnauthenticatedRequest('assumeRoleWithSAML', params, callback); - }, - - /** - * @api private - */ - setupRequestListeners: function setupRequestListeners(request) { - request.addListener('validate', this.optInRegionalEndpoint, true); - }, - - /** - * @api private - */ - optInRegionalEndpoint: function optInRegionalEndpoint(req) { - var service = req.service; - var config = service.config; - config.stsRegionalEndpoints = resolveRegionalEndpointsFlag(service._originalConfig, { - env: ENV_REGIONAL_ENDPOINT_ENABLED, - sharedConfig: CONFIG_REGIONAL_ENDPOINT_ENABLED, - clientConfig: 'stsRegionalEndpoints' - }); - if ( - config.stsRegionalEndpoints === 'regional' && - service.isGlobalEndpoint - ) { - //client will throw if region is not supplied; request will be signed with specified region - if (!config.region) { - throw AWS.util.error(new Error(), - {code: 'ConfigError', message: 'Missing region in config'}); - } - var insertPoint = config.endpoint.indexOf('.amazonaws.com'); - var regionalEndpoint = config.endpoint.substring(0, insertPoint) + - '.' + config.region + config.endpoint.substring(insertPoint); - req.httpRequest.updateEndpoint(regionalEndpoint); - req.httpRequest.region = config.region; - } - } - -}); - - -/***/ }), - -/***/ 31987: -/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { - -var AWS = __nccwpck_require__(28437); - -AWS.util.hideProperties(AWS, ['SimpleWorkflow']); - -/** - * @constant - * @readonly - * Backwards compatibility for access to the {AWS.SWF} service class. - */ -AWS.SimpleWorkflow = AWS.SWF; - - -/***/ }), - -/***/ 29697: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var IniLoader = (__nccwpck_require__(95417).IniLoader); -/** - * Singleton object to load specified config/credentials files. 
- * It will cache all the files ever loaded; - */ -module.exports.b = new IniLoader(); - - -/***/ }), - -/***/ 95417: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var AWS = __nccwpck_require__(28437); -var os = __nccwpck_require__(22037); -var path = __nccwpck_require__(71017); - -function parseFile(filename) { - return AWS.util.ini.parse(AWS.util.readFileSync(filename)); -} - -function getProfiles(fileContent) { - var tmpContent = {}; - Object.keys(fileContent).forEach(function(sectionName) { - if (/^sso-session\s/.test(sectionName)) return; - Object.defineProperty(tmpContent, sectionName.replace(/^profile\s/, ''), { - value: fileContent[sectionName], - enumerable: true - }); - }); - return tmpContent; -} - -function getSsoSessions(fileContent) { - var tmpContent = {}; - Object.keys(fileContent).forEach(function(sectionName) { - if (!/^sso-session\s/.test(sectionName)) return; - Object.defineProperty(tmpContent, sectionName.replace(/^sso-session\s/, ''), { - value: fileContent[sectionName], - enumerable: true - }); - }); - return tmpContent; -} - -/** - * Ini file loader class the same as that used in the SDK. It loads and - * parses config and credentials files in .ini format and cache the content - * to assure files are only read once. - * Note that calling operations on the instance instantiated from this class - * won't affect the behavior of SDK since SDK uses an internal singleton of - * this class. - * @!macro nobrowser - */ -AWS.IniLoader = AWS.util.inherit({ - constructor: function IniLoader() { - this.resolvedProfiles = {}; - this.resolvedSsoSessions = {}; - }, - - /** Remove all cached files. Used after config files are updated. */ - clearCachedFiles: function clearCachedFiles() { - this.resolvedProfiles = {}; - this.resolvedSsoSessions = {}; - }, - - /** - * Load configurations from config/credentials files and cache them - * for later use. If no file is specified it will try to load default files. - * - * @param options [map] information describing the file - * @option options filename [String] ('~/.aws/credentials' or defined by - * AWS_SHARED_CREDENTIALS_FILE process env var or '~/.aws/config' if - * isConfig is set to true) - * path to the file to be read. - * @option options isConfig [Boolean] (false) True to read config file. - * @return [map] object containing contents from file in key-value - * pairs. - */ - loadFrom: function loadFrom(options) { - options = options || {}; - var isConfig = options.isConfig === true; - var filename = options.filename || this.getDefaultFilePath(isConfig); - if (!this.resolvedProfiles[filename]) { - var fileContent = parseFile(filename); - if (isConfig) { - Object.defineProperty(this.resolvedProfiles, filename, { - value: getProfiles(fileContent) - }); - } else { - Object.defineProperty(this.resolvedProfiles, filename, { value: fileContent }); - } - } - return this.resolvedProfiles[filename]; - }, - - /** - * Load sso sessions from config/credentials files and cache them - * for later use. If no file is specified it will try to load default file. - * - * @param options [map] information describing the file - * @option options filename [String] ('~/.aws/config' or defined by - * AWS_CONFIG_FILE process env var) - * @return [map] object containing contents from file in key-value - * pairs. 
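A small Node-only usage sketch, assuming a ~/.aws/config file with a default profile exists: loadFrom with isConfig resolves to that file and returns its sections with any 'profile ' prefix stripped:

    var AWS = require('aws-sdk');

    var iniLoader = new AWS.IniLoader();
    var profiles = iniLoader.loadFrom({ isConfig: true }); // reads ~/.aws/config by default
    console.log(profiles['default']); // e.g. { region: 'us-east-1' } if configured that way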
- */ - loadSsoSessionsFrom: function loadSsoSessionsFrom(options) { - options = options || {}; - var filename = options.filename || this.getDefaultFilePath(true); - if (!this.resolvedSsoSessions[filename]) { - var fileContent = parseFile(filename); - Object.defineProperty(this.resolvedSsoSessions, filename, { - value: getSsoSessions(fileContent) - }); - } - return this.resolvedSsoSessions[filename]; - }, - - /** - * @api private - */ - getDefaultFilePath: function getDefaultFilePath(isConfig) { - return path.join( - this.getHomeDir(), - '.aws', - isConfig ? 'config' : 'credentials' - ); - }, - - /** - * @api private - */ - getHomeDir: function getHomeDir() { - var env = process.env; - var home = env.HOME || - env.USERPROFILE || - (env.HOMEPATH ? ((env.HOMEDRIVE || 'C:/') + env.HOMEPATH) : null); - - if (home) { - return home; - } - - if (typeof os.homedir === 'function') { - return os.homedir(); - } - - throw AWS.util.error( - new Error('Cannot load credentials, HOME path not set') - ); - } -}); - -var IniLoader = AWS.IniLoader; - -module.exports = { - IniLoader: IniLoader -}; - - -/***/ }), - -/***/ 98382: -/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { - -var AWS = __nccwpck_require__(28437); - -/** - * @api private - */ -AWS.Signers.Bearer = AWS.util.inherit(AWS.Signers.RequestSigner, { - constructor: function Bearer(request) { - AWS.Signers.RequestSigner.call(this, request); - }, - - addAuthorization: function addAuthorization(token) { - this.request.headers['Authorization'] = 'Bearer ' + token.token; - } -}); - - -/***/ }), - -/***/ 60328: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var AWS = __nccwpck_require__(28437); -var inherit = AWS.util.inherit; - -/** - * @api private - */ -var expiresHeader = 'presigned-expires'; - -/** - * @api private - */ -function signedUrlBuilder(request) { - var expires = request.httpRequest.headers[expiresHeader]; - var signerClass = request.service.getSignerClass(request); - - delete request.httpRequest.headers['User-Agent']; - delete request.httpRequest.headers['X-Amz-User-Agent']; - - if (signerClass === AWS.Signers.V4) { - if (expires > 604800) { // one week expiry is invalid - var message = 'Presigning does not support expiry time greater ' + - 'than a week with SigV4 signing.'; - throw AWS.util.error(new Error(), { - code: 'InvalidExpiryTime', message: message, retryable: false - }); - } - request.httpRequest.headers[expiresHeader] = expires; - } else if (signerClass === AWS.Signers.S3) { - var now = request.service ? 
request.service.getSkewCorrectedDate() : AWS.util.date.getDate(); - request.httpRequest.headers[expiresHeader] = parseInt( - AWS.util.date.unixTimestamp(now) + expires, 10).toString(); - } else { - throw AWS.util.error(new Error(), { - message: 'Presigning only supports S3 or SigV4 signing.', - code: 'UnsupportedSigner', retryable: false - }); - } -} - -/** - * @api private - */ -function signedUrlSigner(request) { - var endpoint = request.httpRequest.endpoint; - var parsedUrl = AWS.util.urlParse(request.httpRequest.path); - var queryParams = {}; - - if (parsedUrl.search) { - queryParams = AWS.util.queryStringParse(parsedUrl.search.substr(1)); - } - - var auth = request.httpRequest.headers['Authorization'].split(' '); - if (auth[0] === 'AWS') { - auth = auth[1].split(':'); - queryParams['Signature'] = auth.pop(); - queryParams['AWSAccessKeyId'] = auth.join(':'); - - AWS.util.each(request.httpRequest.headers, function (key, value) { - if (key === expiresHeader) key = 'Expires'; - if (key.indexOf('x-amz-meta-') === 0) { - // Delete existing, potentially not normalized key - delete queryParams[key]; - key = key.toLowerCase(); - } - queryParams[key] = value; - }); - delete request.httpRequest.headers[expiresHeader]; - delete queryParams['Authorization']; - delete queryParams['Host']; - } else if (auth[0] === 'AWS4-HMAC-SHA256') { // SigV4 signing - auth.shift(); - var rest = auth.join(' '); - var signature = rest.match(/Signature=(.*?)(?:,|\s|\r?\n|$)/)[1]; - queryParams['X-Amz-Signature'] = signature; - delete queryParams['Expires']; - } - - // build URL - endpoint.pathname = parsedUrl.pathname; - endpoint.search = AWS.util.queryParamsToString(queryParams); -} - -/** - * @api private - */ -AWS.Signers.Presign = inherit({ - /** - * @api private - */ - sign: function sign(request, expireTime, callback) { - request.httpRequest.headers[expiresHeader] = expireTime || 3600; - request.on('build', signedUrlBuilder); - request.on('sign', signedUrlSigner); - request.removeListener('afterBuild', - AWS.EventListeners.Core.SET_CONTENT_LENGTH); - request.removeListener('afterBuild', - AWS.EventListeners.Core.COMPUTE_SHA256); - - request.emit('beforePresign', [request]); - - if (callback) { - request.build(function() { - if (this.response.error) callback(this.response.error); - else { - callback(null, AWS.util.urlFormat(request.httpRequest.endpoint)); - } - }); - } else { - request.build(); - if (request.response.error) throw request.response.error; - return AWS.util.urlFormat(request.httpRequest.endpoint); - } - } -}); - -/** - * @api private - */ -module.exports = AWS.Signers.Presign; - - -/***/ }), - -/***/ 9897: -/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { - -var AWS = __nccwpck_require__(28437); - -var inherit = AWS.util.inherit; - -/** - * @api private - */ -AWS.Signers.RequestSigner = inherit({ - constructor: function RequestSigner(request) { - this.request = request; - }, - - setServiceClientId: function setServiceClientId(id) { - this.serviceClientId = id; - }, - - getServiceClientId: function getServiceClientId() { - return this.serviceClientId; - } -}); - -AWS.Signers.RequestSigner.getVersion = function getVersion(version) { - switch (version) { - case 'v2': return AWS.Signers.V2; - case 'v3': return AWS.Signers.V3; - case 's3v4': return AWS.Signers.V4; - case 'v4': return AWS.Signers.V4; - case 's3': return AWS.Signers.S3; - case 'v3https': return AWS.Signers.V3Https; - case 'bearer': return AWS.Signers.Bearer; - } - throw new Error('Unknown signing 
version ' + version); -}; - -__nccwpck_require__(28489); -__nccwpck_require__(66458); -__nccwpck_require__(24473); -__nccwpck_require__(26529); -__nccwpck_require__(58616); -__nccwpck_require__(60328); -__nccwpck_require__(98382); - - -/***/ }), - -/***/ 58616: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var AWS = __nccwpck_require__(28437); -var inherit = AWS.util.inherit; - -/** - * @api private - */ -AWS.Signers.S3 = inherit(AWS.Signers.RequestSigner, { - /** - * When building the stringToSign, these sub resource params should be - * part of the canonical resource string with their NON-decoded values - */ - subResources: { - 'acl': 1, - 'accelerate': 1, - 'analytics': 1, - 'cors': 1, - 'lifecycle': 1, - 'delete': 1, - 'inventory': 1, - 'location': 1, - 'logging': 1, - 'metrics': 1, - 'notification': 1, - 'partNumber': 1, - 'policy': 1, - 'requestPayment': 1, - 'replication': 1, - 'restore': 1, - 'tagging': 1, - 'torrent': 1, - 'uploadId': 1, - 'uploads': 1, - 'versionId': 1, - 'versioning': 1, - 'versions': 1, - 'website': 1 - }, - - // when building the stringToSign, these querystring params should be - // part of the canonical resource string with their NON-encoded values - responseHeaders: { - 'response-content-type': 1, - 'response-content-language': 1, - 'response-expires': 1, - 'response-cache-control': 1, - 'response-content-disposition': 1, - 'response-content-encoding': 1 - }, - - addAuthorization: function addAuthorization(credentials, date) { - if (!this.request.headers['presigned-expires']) { - this.request.headers['X-Amz-Date'] = AWS.util.date.rfc822(date); - } - - if (credentials.sessionToken) { - // presigned URLs require this header to be lowercased - this.request.headers['x-amz-security-token'] = credentials.sessionToken; - } - - var signature = this.sign(credentials.secretAccessKey, this.stringToSign()); - var auth = 'AWS ' + credentials.accessKeyId + ':' + signature; - - this.request.headers['Authorization'] = auth; - }, - - stringToSign: function stringToSign() { - var r = this.request; - - var parts = []; - parts.push(r.method); - parts.push(r.headers['Content-MD5'] || ''); - parts.push(r.headers['Content-Type'] || ''); - - // This is the "Date" header, but we use X-Amz-Date. - // The S3 signing mechanism requires us to pass an empty - // string for this Date header regardless. - parts.push(r.headers['presigned-expires'] || ''); - - var headers = this.canonicalizedAmzHeaders(); - if (headers) parts.push(headers); - parts.push(this.canonicalizedResource()); - - return parts.join('\n'); - - }, - - canonicalizedAmzHeaders: function canonicalizedAmzHeaders() { - - var amzHeaders = []; - - AWS.util.each(this.request.headers, function (name) { - if (name.match(/^x-amz-/i)) - amzHeaders.push(name); - }); - - amzHeaders.sort(function (a, b) { - return a.toLowerCase() < b.toLowerCase() ? 
-1 : 1; - }); - - var parts = []; - AWS.util.arrayEach.call(this, amzHeaders, function (name) { - parts.push(name.toLowerCase() + ':' + String(this.request.headers[name])); - }); - - return parts.join('\n'); - - }, - - canonicalizedResource: function canonicalizedResource() { - - var r = this.request; - - var parts = r.path.split('?'); - var path = parts[0]; - var querystring = parts[1]; - - var resource = ''; - - if (r.virtualHostedBucket) - resource += '/' + r.virtualHostedBucket; - - resource += path; - - if (querystring) { - - // collect a list of sub resources and query params that need to be signed - var resources = []; - - AWS.util.arrayEach.call(this, querystring.split('&'), function (param) { - var name = param.split('=')[0]; - var value = param.split('=')[1]; - if (this.subResources[name] || this.responseHeaders[name]) { - var subresource = { name: name }; - if (value !== undefined) { - if (this.subResources[name]) { - subresource.value = value; - } else { - subresource.value = decodeURIComponent(value); - } - } - resources.push(subresource); - } - }); - - resources.sort(function (a, b) { return a.name < b.name ? -1 : 1; }); - - if (resources.length) { - - querystring = []; - AWS.util.arrayEach(resources, function (res) { - if (res.value === undefined) { - querystring.push(res.name); - } else { - querystring.push(res.name + '=' + res.value); - } - }); - - resource += '?' + querystring.join('&'); - } - - } - - return resource; - - }, - - sign: function sign(secret, string) { - return AWS.util.crypto.hmac(secret, string, 'base64', 'sha1'); - } -}); - -/** - * @api private - */ -module.exports = AWS.Signers.S3; - - -/***/ }), - -/***/ 28489: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var AWS = __nccwpck_require__(28437); -var inherit = AWS.util.inherit; - -/** - * @api private - */ -AWS.Signers.V2 = inherit(AWS.Signers.RequestSigner, { - addAuthorization: function addAuthorization(credentials, date) { - - if (!date) date = AWS.util.date.getDate(); - - var r = this.request; - - r.params.Timestamp = AWS.util.date.iso8601(date); - r.params.SignatureVersion = '2'; - r.params.SignatureMethod = 'HmacSHA256'; - r.params.AWSAccessKeyId = credentials.accessKeyId; - - if (credentials.sessionToken) { - r.params.SecurityToken = credentials.sessionToken; - } - - delete r.params.Signature; // delete old Signature for re-signing - r.params.Signature = this.signature(credentials); - - r.body = AWS.util.queryParamsToString(r.params); - r.headers['Content-Length'] = r.body.length; - }, - - signature: function signature(credentials) { - return AWS.util.crypto.hmac(credentials.secretAccessKey, this.stringToSign(), 'base64'); - }, - - stringToSign: function stringToSign() { - var parts = []; - parts.push(this.request.method); - parts.push(this.request.endpoint.host.toLowerCase()); - parts.push(this.request.pathname()); - parts.push(AWS.util.queryParamsToString(this.request.params)); - return parts.join('\n'); - } - -}); - -/** - * @api private - */ -module.exports = AWS.Signers.V2; - - -/***/ }), - -/***/ 66458: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var AWS = __nccwpck_require__(28437); -var inherit = AWS.util.inherit; - -/** - * @api private - */ -AWS.Signers.V3 = inherit(AWS.Signers.RequestSigner, { - addAuthorization: function addAuthorization(credentials, date) { - - var datetime = AWS.util.date.rfc822(date); - - this.request.headers['X-Amz-Date'] = datetime; - - if (credentials.sessionToken) { - 
this.request.headers['x-amz-security-token'] = credentials.sessionToken; - } - - this.request.headers['X-Amzn-Authorization'] = - this.authorization(credentials, datetime); - - }, - - authorization: function authorization(credentials) { - return 'AWS3 ' + - 'AWSAccessKeyId=' + credentials.accessKeyId + ',' + - 'Algorithm=HmacSHA256,' + - 'SignedHeaders=' + this.signedHeaders() + ',' + - 'Signature=' + this.signature(credentials); - }, - - signedHeaders: function signedHeaders() { - var headers = []; - AWS.util.arrayEach(this.headersToSign(), function iterator(h) { - headers.push(h.toLowerCase()); - }); - return headers.sort().join(';'); - }, - - canonicalHeaders: function canonicalHeaders() { - var headers = this.request.headers; - var parts = []; - AWS.util.arrayEach(this.headersToSign(), function iterator(h) { - parts.push(h.toLowerCase().trim() + ':' + String(headers[h]).trim()); - }); - return parts.sort().join('\n') + '\n'; - }, - - headersToSign: function headersToSign() { - var headers = []; - AWS.util.each(this.request.headers, function iterator(k) { - if (k === 'Host' || k === 'Content-Encoding' || k.match(/^X-Amz/i)) { - headers.push(k); - } - }); - return headers; - }, - - signature: function signature(credentials) { - return AWS.util.crypto.hmac(credentials.secretAccessKey, this.stringToSign(), 'base64'); - }, - - stringToSign: function stringToSign() { - var parts = []; - parts.push(this.request.method); - parts.push('/'); - parts.push(''); - parts.push(this.canonicalHeaders()); - parts.push(this.request.body); - return AWS.util.crypto.sha256(parts.join('\n')); - } - -}); - -/** - * @api private - */ -module.exports = AWS.Signers.V3; - - -/***/ }), - -/***/ 24473: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var AWS = __nccwpck_require__(28437); -var inherit = AWS.util.inherit; - -__nccwpck_require__(66458); - -/** - * @api private - */ -AWS.Signers.V3Https = inherit(AWS.Signers.V3, { - authorization: function authorization(credentials) { - return 'AWS3-HTTPS ' + - 'AWSAccessKeyId=' + credentials.accessKeyId + ',' + - 'Algorithm=HmacSHA256,' + - 'Signature=' + this.signature(credentials); - }, - - stringToSign: function stringToSign() { - return this.request.headers['X-Amz-Date']; - } -}); - -/** - * @api private - */ -module.exports = AWS.Signers.V3Https; - - -/***/ }), - -/***/ 26529: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var AWS = __nccwpck_require__(28437); -var v4Credentials = __nccwpck_require__(62660); -var inherit = AWS.util.inherit; - -/** - * @api private - */ -var expiresHeader = 'presigned-expires'; - -/** - * @api private - */ -AWS.Signers.V4 = inherit(AWS.Signers.RequestSigner, { - constructor: function V4(request, serviceName, options) { - AWS.Signers.RequestSigner.call(this, request); - this.serviceName = serviceName; - options = options || {}; - this.signatureCache = typeof options.signatureCache === 'boolean' ? 
options.signatureCache : true; - this.operation = options.operation; - this.signatureVersion = options.signatureVersion; - }, - - algorithm: 'AWS4-HMAC-SHA256', - - addAuthorization: function addAuthorization(credentials, date) { - var datetime = AWS.util.date.iso8601(date).replace(/[:\-]|\.\d{3}/g, ''); - - if (this.isPresigned()) { - this.updateForPresigned(credentials, datetime); - } else { - this.addHeaders(credentials, datetime); - } - - this.request.headers['Authorization'] = - this.authorization(credentials, datetime); - }, - - addHeaders: function addHeaders(credentials, datetime) { - this.request.headers['X-Amz-Date'] = datetime; - if (credentials.sessionToken) { - this.request.headers['x-amz-security-token'] = credentials.sessionToken; - } - }, - - updateForPresigned: function updateForPresigned(credentials, datetime) { - var credString = this.credentialString(datetime); - var qs = { - 'X-Amz-Date': datetime, - 'X-Amz-Algorithm': this.algorithm, - 'X-Amz-Credential': credentials.accessKeyId + '/' + credString, - 'X-Amz-Expires': this.request.headers[expiresHeader], - 'X-Amz-SignedHeaders': this.signedHeaders() - }; - - if (credentials.sessionToken) { - qs['X-Amz-Security-Token'] = credentials.sessionToken; - } - - if (this.request.headers['Content-Type']) { - qs['Content-Type'] = this.request.headers['Content-Type']; - } - if (this.request.headers['Content-MD5']) { - qs['Content-MD5'] = this.request.headers['Content-MD5']; - } - if (this.request.headers['Cache-Control']) { - qs['Cache-Control'] = this.request.headers['Cache-Control']; - } - - // need to pull in any other X-Amz-* headers - AWS.util.each.call(this, this.request.headers, function(key, value) { - if (key === expiresHeader) return; - if (this.isSignableHeader(key)) { - var lowerKey = key.toLowerCase(); - // Metadata should be normalized - if (lowerKey.indexOf('x-amz-meta-') === 0) { - qs[lowerKey] = value; - } else if (lowerKey.indexOf('x-amz-') === 0) { - qs[key] = value; - } - } - }); - - var sep = this.request.path.indexOf('?') >= 0 ? 
'&' : '?'; - this.request.path += sep + AWS.util.queryParamsToString(qs); - }, - - authorization: function authorization(credentials, datetime) { - var parts = []; - var credString = this.credentialString(datetime); - parts.push(this.algorithm + ' Credential=' + - credentials.accessKeyId + '/' + credString); - parts.push('SignedHeaders=' + this.signedHeaders()); - parts.push('Signature=' + this.signature(credentials, datetime)); - return parts.join(', '); - }, - - signature: function signature(credentials, datetime) { - var signingKey = v4Credentials.getSigningKey( - credentials, - datetime.substr(0, 8), - this.request.region, - this.serviceName, - this.signatureCache - ); - return AWS.util.crypto.hmac(signingKey, this.stringToSign(datetime), 'hex'); - }, - - stringToSign: function stringToSign(datetime) { - var parts = []; - parts.push('AWS4-HMAC-SHA256'); - parts.push(datetime); - parts.push(this.credentialString(datetime)); - parts.push(this.hexEncodedHash(this.canonicalString())); - return parts.join('\n'); - }, - - canonicalString: function canonicalString() { - var parts = [], pathname = this.request.pathname(); - if (this.serviceName !== 's3' && this.signatureVersion !== 's3v4') pathname = AWS.util.uriEscapePath(pathname); - - parts.push(this.request.method); - parts.push(pathname); - parts.push(this.request.search()); - parts.push(this.canonicalHeaders() + '\n'); - parts.push(this.signedHeaders()); - parts.push(this.hexEncodedBodyHash()); - return parts.join('\n'); - }, - - canonicalHeaders: function canonicalHeaders() { - var headers = []; - AWS.util.each.call(this, this.request.headers, function (key, item) { - headers.push([key, item]); - }); - headers.sort(function (a, b) { - return a[0].toLowerCase() < b[0].toLowerCase() ? -1 : 1; - }); - var parts = []; - AWS.util.arrayEach.call(this, headers, function (item) { - var key = item[0].toLowerCase(); - if (this.isSignableHeader(key)) { - var value = item[1]; - if (typeof value === 'undefined' || value === null || typeof value.toString !== 'function') { - throw AWS.util.error(new Error('Header ' + key + ' contains invalid value'), { - code: 'InvalidHeader' - }); - } - parts.push(key + ':' + - this.canonicalHeaderValues(value.toString())); - } - }); - return parts.join('\n'); - }, - - canonicalHeaderValues: function canonicalHeaderValues(values) { - return values.replace(/\s+/g, ' ').replace(/^\s+|\s+$/g, ''); - }, - - signedHeaders: function signedHeaders() { - var keys = []; - AWS.util.each.call(this, this.request.headers, function (key) { - key = key.toLowerCase(); - if (this.isSignableHeader(key)) keys.push(key); - }); - return keys.sort().join(';'); - }, - - credentialString: function credentialString(datetime) { - return v4Credentials.createScope( - datetime.substr(0, 8), - this.request.region, - this.serviceName - ); - }, - - hexEncodedHash: function hash(string) { - return AWS.util.crypto.sha256(string, 'hex'); - }, - - hexEncodedBodyHash: function hexEncodedBodyHash() { - var request = this.request; - if (this.isPresigned() && (['s3', 's3-object-lambda'].indexOf(this.serviceName) > -1) && !request.body) { - return 'UNSIGNED-PAYLOAD'; - } else if (request.headers['X-Amz-Content-Sha256']) { - return request.headers['X-Amz-Content-Sha256']; - } else { - return this.hexEncodedHash(this.request.body || ''); - } - }, - - unsignableHeaders: [ - 'authorization', - 'content-type', - 'content-length', - 'user-agent', - expiresHeader, - 'expect', - 'x-amzn-trace-id' - ], - - isSignableHeader: function isSignableHeader(key) { - if 
(key.toLowerCase().indexOf('x-amz-') === 0) return true; - return this.unsignableHeaders.indexOf(key) < 0; - }, - - isPresigned: function isPresigned() { - return this.request.headers[expiresHeader] ? true : false; - } - -}); - -/** - * @api private - */ -module.exports = AWS.Signers.V4; - - -/***/ }), - -/***/ 62660: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var AWS = __nccwpck_require__(28437); - -/** - * @api private - */ -var cachedSecret = {}; - -/** - * @api private - */ -var cacheQueue = []; - -/** - * @api private - */ -var maxCacheEntries = 50; - -/** - * @api private - */ -var v4Identifier = 'aws4_request'; - -/** - * @api private - */ -module.exports = { - /** - * @api private - * - * @param date [String] - * @param region [String] - * @param serviceName [String] - * @return [String] - */ - createScope: function createScope(date, region, serviceName) { - return [ - date.substr(0, 8), - region, - serviceName, - v4Identifier - ].join('/'); - }, - - /** - * @api private - * - * @param credentials [Credentials] - * @param date [String] - * @param region [String] - * @param service [String] - * @param shouldCache [Boolean] - * @return [String] - */ - getSigningKey: function getSigningKey( - credentials, - date, - region, - service, - shouldCache - ) { - var credsIdentifier = AWS.util.crypto - .hmac(credentials.secretAccessKey, credentials.accessKeyId, 'base64'); - var cacheKey = [credsIdentifier, date, region, service].join('_'); - shouldCache = shouldCache !== false; - if (shouldCache && (cacheKey in cachedSecret)) { - return cachedSecret[cacheKey]; - } - - var kDate = AWS.util.crypto.hmac( - 'AWS4' + credentials.secretAccessKey, - date, - 'buffer' - ); - var kRegion = AWS.util.crypto.hmac(kDate, region, 'buffer'); - var kService = AWS.util.crypto.hmac(kRegion, service, 'buffer'); - - var signingKey = AWS.util.crypto.hmac(kService, v4Identifier, 'buffer'); - if (shouldCache) { - cachedSecret[cacheKey] = signingKey; - cacheQueue.push(cacheKey); - if (cacheQueue.length > maxCacheEntries) { - // remove the oldest entry (not the least recently used) - delete cachedSecret[cacheQueue.shift()]; - } - } - - return signingKey; - }, - - /** - * @api private - * - * Empties the derived signing key cache. Made available for testing purposes - * only. - */ - emptyCache: function emptyCache() { - cachedSecret = {}; - cacheQueue = []; - } -}; - - -/***/ }), - -/***/ 68118: -/***/ ((module) => { - -function AcceptorStateMachine(states, state) { - this.currentState = state || null; - this.states = states || {}; -} - -AcceptorStateMachine.prototype.runTo = function runTo(finalState, done, bindObject, inputError) { - if (typeof finalState === 'function') { - inputError = bindObject; bindObject = done; - done = finalState; finalState = null; - } - - var self = this; - var state = self.states[self.currentState]; - state.fn.call(bindObject || self, inputError, function(err) { - if (err) { - if (state.fail) self.currentState = state.fail; - else return done ? done.call(bindObject, err) : null; - } else { - if (state.accept) self.currentState = state.accept; - else return done ? done.call(bindObject) : null; - } - if (self.currentState === finalState) { - return done ? 
done.call(bindObject, err) : null; - } - - self.runTo(finalState, done, bindObject, err); - }); -}; - -AcceptorStateMachine.prototype.addState = function addState(name, acceptState, failState, fn) { - if (typeof acceptState === 'function') { - fn = acceptState; acceptState = null; failState = null; - } else if (typeof failState === 'function') { - fn = failState; failState = null; - } - - if (!this.currentState) this.currentState = name; - this.states[name] = { accept: acceptState, fail: failState, fn: fn }; - return this; -}; - -/** - * @api private - */ -module.exports = AcceptorStateMachine; - - -/***/ }), - -/***/ 82647: -/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { - -var AWS = __nccwpck_require__(28437); - -/** - * Represents AWS token object, which contains {token}, and optional - * {expireTime}. - * Creating a `Token` object allows you to pass around your - * token to configuration and service objects. - * - * Note that this class typically does not need to be constructed manually, - * as the {AWS.Config} and {AWS.Service} classes both accept simple - * options hashes with the two keys. The token from this object will be used - * automatically in operations which require them. - * - * ## Expiring and Refreshing Token - * - * Occasionally token can expire in the middle of a long-running - * application. In this case, the SDK will automatically attempt to - * refresh the token from the storage location if the Token - * class implements the {refresh} method. - * - * If you are implementing a token storage location, you - * will want to create a subclass of the `Token` class and - * override the {refresh} method. This method allows token to be - * retrieved from the backing store, be it a file system, database, or - * some network storage. The method should reset the token attributes - * on the object. - * - * @!attribute token - * @return [String] represents the literal token string. This will typically - * be a base64 encoded string. - * @!attribute expireTime - * @return [Date] a time when token should be considered expired. Used - * in conjunction with {expired}. - * @!attribute expired - * @return [Boolean] whether the token is expired and require a refresh. Used - * in conjunction with {expireTime}. - */ -AWS.Token = AWS.util.inherit({ - /** - * Creates a Token object with a given set of information in options hash. - * @option options token [String] represents the literal token string. - * @option options expireTime [Date] field representing the time at which - * the token expires. - * @example Create a token object - * var token = new AWS.Token({ token: 'token' }); - */ - constructor: function Token(options) { - // hide token from being displayed with util.inspect - AWS.util.hideProperties(this, ['token']); - - this.expired = false; - this.expireTime = null; - this.refreshCallbacks = []; - if (arguments.length === 1) { - var options = arguments[0]; - this.token = options.token; - this.expireTime = options.expireTime; - } - }, - - /** - * @return [Integer] the number of seconds before {expireTime} during which - * the token will be considered expired. - */ - expiryWindow: 15, - - /** - * @return [Boolean] whether the Token object should call {refresh} - * @note Subclasses should override this method to provide custom refresh - * logic. 
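A quick illustration of the expiry-window check implemented just below (the times are invented): with the default `expiryWindow` of 15 seconds, a token that expires sooner than that reports itself as needing a refresh.

```javascript
var token = new AWS.Token({
  token: 'example-token',
  expireTime: new Date(Date.now() + 10 * 1000)   // expires in 10s
});
token.needsRefresh();                            // true: inside the 15s window

token.expireTime = new Date(Date.now() + 60 * 1000);
token.needsRefresh();                            // false: outside the window
```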
- */ - needsRefresh: function needsRefresh() { - var currentTime = AWS.util.date.getDate().getTime(); - var adjustedTime = new Date(currentTime + this.expiryWindow * 1000); - - if (this.expireTime && adjustedTime > this.expireTime) - return true; - - return this.expired || !this.token; - }, - - /** - * Gets the existing token, refreshing them if they are not yet loaded - * or have expired. Users should call this method before using {refresh}, - * as this will not attempt to reload token when they are already - * loaded into the object. - * - * @callback callback function(err) - * When this callback is called with no error, it means either token - * do not need to be refreshed or refreshed token information has - * been loaded into the object (as the `token` property). - * @param err [Error] if an error occurred, this value will be filled - */ - get: function get(callback) { - var self = this; - if (this.needsRefresh()) { - this.refresh(function(err) { - if (!err) self.expired = false; // reset expired flag - if (callback) callback(err); - }); - } else if (callback) { - callback(); - } - }, - - /** - * @!method getPromise() - * Returns a 'thenable' promise. - * Gets the existing token, refreshing it if it's not yet loaded - * or have expired. Users should call this method before using {refresh}, - * as this will not attempt to reload token when it's already - * loaded into the object. - * - * Two callbacks can be provided to the `then` method on the returned promise. - * The first callback will be called if the promise is fulfilled, and the second - * callback will be called if the promise is rejected. - * @callback fulfilledCallback function() - * Called if the promise is fulfilled. When this callback is called, it means - * either token does not need to be refreshed or refreshed token information - * has been loaded into the object (as the `token` property). - * @callback rejectedCallback function(err) - * Called if the promise is rejected. - * @param err [Error] if an error occurred, this value will be filled. - * @return [Promise] A promise that represents the state of the `get` call. - * @example Calling the `getPromise` method. - * var promise = tokenProvider.getPromise(); - * promise.then(function() { ... }, function(err) { ... }); - */ - - /** - * @!method refreshPromise() - * Returns a 'thenable' promise. - * Refreshes the token. Users should call {get} before attempting - * to forcibly refresh token. - * - * Two callbacks can be provided to the `then` method on the returned promise. - * The first callback will be called if the promise is fulfilled, and the second - * callback will be called if the promise is rejected. - * @callback fulfilledCallback function() - * Called if the promise is fulfilled. When this callback is called, it - * means refreshed token information has been loaded into the object - * (as the `token` property). - * @callback rejectedCallback function(err) - * Called if the promise is rejected. - * @param err [Error] if an error occurred, this value will be filled. - * @return [Promise] A promise that represents the state of the `refresh` call. - * @example Calling the `refreshPromise` method. - * var promise = tokenProvider.refreshPromise(); - * promise.then(function() { ... }, function(err) { ... }); - */ - - /** - * Refreshes the token. Users should call {get} before attempting - * to forcibly refresh token. 
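The class-level docs above suggest subclassing `AWS.Token` and overriding {refresh} to plug in a custom token store. A minimal sketch under that assumption (the `FileToken` name and the `./token.json` layout are invented for illustration):

```javascript
var fs = require('fs');

var FileToken = AWS.util.inherit(AWS.Token, {
  constructor: function FileToken(path) {
    AWS.Token.call(this, {});
    this.path = path;
  },
  refresh: function refresh(callback) {
    var self = this;
    fs.readFile(this.path, 'utf-8', function (err, data) {
      if (err) return callback(err);
      var stored = JSON.parse(data);
      self.token = stored.token;                     // literal token string
      self.expireTime = new Date(stored.expiresAt);  // consumed by needsRefresh()
      self.expired = false;
      callback();
    });
  }
});
```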
- * - * @callback callback function(err) - * When this callback is called with no error, it means refreshed - * token information has been loaded into the object (as the - * `token` property). - * @param err [Error] if an error occurred, this value will be filled - * @note Subclasses should override this method to reset the - * {token} on the token object and then call the callback with - * any error information. - * @see get - */ - refresh: function refresh(callback) { - this.expired = false; - callback(); - }, - - /** - * @api private - * @param callback - */ - coalesceRefresh: function coalesceRefresh(callback, sync) { - var self = this; - if (self.refreshCallbacks.push(callback) === 1) { - self.load(function onLoad(err) { - AWS.util.arrayEach(self.refreshCallbacks, function(callback) { - if (sync) { - callback(err); - } else { - // callback could throw, so defer to ensure all callbacks are notified - AWS.util.defer(function () { - callback(err); - }); - } - }); - self.refreshCallbacks.length = 0; - }); - } - }, - - /** - * @api private - * @param callback - */ - load: function load(callback) { - callback(); - } -}); - -/** - * @api private - */ -AWS.Token.addPromisesToClass = function addPromisesToClass(PromiseDependency) { - this.prototype.getPromise = AWS.util.promisifyMethod('get', PromiseDependency); - this.prototype.refreshPromise = AWS.util.promisifyMethod('refresh', PromiseDependency); -}; - -/** - * @api private - */ -AWS.Token.deletePromisesFromClass = function deletePromisesFromClass() { - delete this.prototype.getPromise; - delete this.prototype.refreshPromise; -}; - -AWS.util.addPromises(AWS.Token); - - -/***/ }), - -/***/ 90327: -/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { - -var AWS = __nccwpck_require__(28437); -var crypto = __nccwpck_require__(6113); -var fs = __nccwpck_require__(57147); -var path = __nccwpck_require__(71017); -var iniLoader = AWS.util.iniLoader; - -// Tracking refresh attempt to ensure refresh is not attempted more than once every 30 seconds. -var lastRefreshAttemptTime = 0; - -/** - * Throws error if key is not present in token object. - * - * @param token [Object] Object to be validated. - * @param key [String] The key to be validated on the object. - */ -var validateTokenKey = function validateTokenKey(token, key) { - if (!token[key]) { - throw AWS.util.error( - new Error('Key "' + key + '" not present in SSO Token'), - { code: 'SSOTokenProviderFailure' } - ); - } -}; - -/** - * Calls callback function with or without error based on provided times in case - * of unsuccessful refresh. - * - * @param currentTime [number] current time in milliseconds since ECMAScript epoch. - * @param tokenExpireTime [number] token expire time in milliseconds since ECMAScript epoch. - * @param callback [Function] Callback to call in case of error. - */ -var refreshUnsuccessful = function refreshUnsuccessful( - currentTime, - tokenExpireTime, - callback -) { - if (tokenExpireTime > currentTime) { - // Cached token is still valid, return. - callback(null); - } else { - // Token invalid, throw error requesting user to sso login. - throw AWS.util.error( - new Error('SSO Token refresh failed. Please log in using "aws sso login"'), - { code: 'SSOTokenProviderFailure' } - ); - } -}; - -/** - * Represents token loaded from disk derived from the AWS SSO device grant authorization flow. - * - * ## Using SSO Token Provider - * - * This provider is checked by default in the Node.js environment in TokenProviderChain.
- * To use the SSO Token Provider, simply add your SSO Start URL and Region to the - * ~/.aws/config file in the following format: - * - * [default] - * sso_start_url = https://d-abc123.awsapps.com/start - * sso_region = us-east-1 - * - * ## Using custom profiles - * - * The SDK supports loading token for separate profiles. This can be done in two ways: - * - * 1. Set the `AWS_PROFILE` environment variable in your process prior to loading the SDK. - * 2. Directly load the AWS.SSOTokenProvider: - * - * ```javascript - * var ssoTokenProvider = new AWS.SSOTokenProvider({profile: 'myprofile'}); - * ``` - * - * @!macro nobrowser - */ -AWS.SSOTokenProvider = AWS.util.inherit(AWS.Token, { - /** - * Expiry window of five minutes. - */ - expiryWindow: 5 * 60, - - /** - * Creates a new token object from cached access token. - * - * @param options [map] a set of options - * @option options profile [String] (AWS_PROFILE env var or 'default') - * the name of the profile to load. - * @option options callback [Function] (err) Token is eagerly loaded - * by the constructor. When the callback is called with no error, the - * token has been loaded successfully. - */ - constructor: function SSOTokenProvider(options) { - AWS.Token.call(this); - - options = options || {}; - - this.expired = true; - this.profile = options.profile || process.env.AWS_PROFILE || AWS.util.defaultProfile; - this.get(options.callback || AWS.util.fn.noop); - }, - - /** - * Reads sso_start_url from provided profile, and reads token from - * ~/.aws/sso/cache/.json - * - * Throws an error if required fields token and expiresAt are missing. - * Throws an error if token has expired and metadata to perform refresh is - * not available. - * Attempts to refresh the token if it's within 5 minutes before expiry time. - * - * @api private - */ - load: function load(callback) { - var self = this; - var profiles = iniLoader.loadFrom({ isConfig: true }); - var profile = profiles[this.profile] || {}; - - if (Object.keys(profile).length === 0) { - throw AWS.util.error( - new Error('Profile "' + this.profile + '" not found'), - { code: 'SSOTokenProviderFailure' } - ); - } else if (!profile['sso_session']) { - throw AWS.util.error( - new Error('Profile "' + profileName + '" is missing required property "sso_session".'), - { code: 'SSOTokenProviderFailure' } - ); - } - - var ssoSessionName = profile['sso_session']; - var ssoSessions = iniLoader.loadSsoSessionsFrom(); - var ssoSession = ssoSessions[ssoSessionName]; - - if (!ssoSession) { - throw AWS.util.error( - new Error('Sso session "' + ssoSessionName + '" not found'), - { code: 'SSOTokenProviderFailure' } - ); - } else if (!ssoSession['sso_start_url']) { - throw AWS.util.error( - new Error('Sso session "' + profileName + '" is missing required property "sso_start_url".'), - { code: 'SSOTokenProviderFailure' } - ); - } else if (!ssoSession['sso_region']) { - throw AWS.util.error( - new Error('Sso session "' + profileName + '" is missing required property "sso_region".'), - { code: 'SSOTokenProviderFailure' } - ); - } - - var hasher = crypto.createHash('sha1'); - var fileName = hasher.update(ssoSessionName).digest('hex') + '.json'; - var cachePath = path.join(iniLoader.getHomeDir(), '.aws', 'sso', 'cache', fileName); - var tokenFromCache = JSON.parse(fs.readFileSync(cachePath)); - - if (!tokenFromCache) { - throw AWS.util.error( - new Error('Cached token not found. 
Please log in using "aws sso login"' - + ' for profile "' + this.profile + '".'), - { code: 'SSOTokenProviderFailure' } - ); - } - - validateTokenKey(tokenFromCache, 'accessToken'); - validateTokenKey(tokenFromCache, 'expiresAt'); - - var currentTime = AWS.util.date.getDate().getTime(); - var adjustedTime = new Date(currentTime + this.expiryWindow * 1000); - var tokenExpireTime = new Date(tokenFromCache['expiresAt']); - - if (tokenExpireTime > adjustedTime) { - // Token is valid and not expired. - self.token = tokenFromCache.accessToken; - self.expireTime = tokenExpireTime; - self.expired = false; - callback(null); - return; - } - - // Skip new refresh, if last refresh was done within 30 seconds. - if (currentTime - lastRefreshAttemptTime < 30 * 1000) { - refreshUnsuccessful(currentTime, tokenExpireTime, callback); - return; - } - - // Token is in expiry window, refresh from SSOOIDC.createToken() call. - validateTokenKey(tokenFromCache, 'clientId'); - validateTokenKey(tokenFromCache, 'clientSecret'); - validateTokenKey(tokenFromCache, 'refreshToken'); - - if (!self.service || self.service.config.region !== ssoSession.sso_region) { - self.service = new AWS.SSOOIDC({ region: ssoSession.sso_region }); - } - - var params = { - clientId: tokenFromCache.clientId, - clientSecret: tokenFromCache.clientSecret, - refreshToken: tokenFromCache.refreshToken, - grantType: 'refresh_token', - }; - - lastRefreshAttemptTime = AWS.util.date.getDate().getTime(); - self.service.createToken(params, function(err, data) { - if (err || !data) { - refreshUnsuccessful(currentTime, tokenExpireTime, callback); - } else { - try { - validateTokenKey(data, 'accessToken'); - validateTokenKey(data, 'expiresIn'); - self.expired = false; - self.token = data.accessToken; - self.expireTime = new Date(Date.now() + data.expiresIn * 1000); - callback(null); - - try { - // Write updated token data to disk. - tokenFromCache.accessToken = data.accessToken; - tokenFromCache.expiresAt = self.expireTime.toISOString(); - tokenFromCache.refreshToken = data.refreshToken; - fs.writeFileSync(cachePath, JSON.stringify(tokenFromCache, null, 2)); - } catch (error) { - // Swallow error if unable to write token to file. - } - } catch (error) { - refreshUnsuccessful(currentTime, tokenExpireTime, callback); - } - } - }); - }, - - /** - * Loads the cached access token from disk. - * - * @callback callback function(err) - * Called after the AWS SSO process has been executed. When this - * callback is called with no error, it means that the token information - * has been loaded into the object (as the `token` property). - * @param err [Error] if an error occurred, this value will be filled. - * @see get - */ - refresh: function refresh(callback) { - iniLoader.clearCachedFiles(); - this.coalesceRefresh(callback || AWS.util.fn.callback); - }, -}); - - -/***/ }), - -/***/ 50126: -/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { - -var AWS = __nccwpck_require__(28437); - -/** - * Creates a token provider chain that searches for token in a list of - * token providers specified by the {providers} property. - * - * By default, the chain will use the {defaultProviders} to resolve token. - * - * ## Setting Providers - * - * Each provider in the {providers} list should be a function that returns - * a {AWS.Token} object, or a hardcoded token object. The function - * form allows for delayed execution of the Token construction. 
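For instance (a sketch, not taken from the source), a chain can mix a ready-made token object with a lazily constructed provider:

```javascript
var chain = new AWS.TokenProviderChain([
  new AWS.Token({ token: 'hard-coded-token' }),        // checked first
  function () { return new AWS.SSOTokenProvider(); }   // constructed only when reached
]);
```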
- * - * ## Resolving Token from a Chain - * - * Call {resolve} to return the first valid token object that can be - * loaded by the provider chain. - * - * For example, to resolve a chain with a custom provider that checks a file - * on disk after the set of {defaultProviders}: - * - * ```javascript - * var diskProvider = new FileTokenProvider('./token.json'); - * var chain = new AWS.TokenProviderChain(); - * chain.providers.push(diskProvider); - * chain.resolve(); - * ``` - * - * The above code will return the `diskProvider` object if the - * file contains token and the `defaultProviders` do not contain - * any token. - * - * @!attribute providers - * @return [Array] - * a list of token objects or functions that return token - * objects. If the provider is a function, the function will be - * executed lazily when the provider needs to be checked for valid - * token. By default, this object will be set to the {defaultProviders}. - * @see defaultProviders - */ -AWS.TokenProviderChain = AWS.util.inherit(AWS.Token, { - - /** - * Creates a new TokenProviderChain with a default set of providers - * specified by {defaultProviders}. - */ - constructor: function TokenProviderChain(providers) { - if (providers) { - this.providers = providers; - } else { - this.providers = AWS.TokenProviderChain.defaultProviders.slice(0); - } - this.resolveCallbacks = []; - }, - - /** - * @!method resolvePromise() - * Returns a 'thenable' promise. - * Resolves the provider chain by searching for the first token in {providers}. - * - * Two callbacks can be provided to the `then` method on the returned promise. - * The first callback will be called if the promise is fulfilled, and the second - * callback will be called if the promise is rejected. - * @callback fulfilledCallback function(token) - * Called if the promise is fulfilled and the provider resolves the chain - * to a token object - * @param token [AWS.Token] the token object resolved by the provider chain. - * @callback rejectedCallback function(error) - * Called if the promise is rejected. - * @param err [Error] the error object returned if no token is found. - * @return [Promise] A promise that represents the state of the `resolve` method call. - * @example Calling the `resolvePromise` method. - * var promise = chain.resolvePromise(); - * promise.then(function(token) { ... }, function(err) { ... }); - */ - - /** - * Resolves the provider chain by searching for the first token in {providers}. - * - * @callback callback function(err, token) - * Called when the provider resolves the chain to a token object - * or null if no token can be found. - * - * @param err [Error] the error object returned if no token is found. - * @param token [AWS.Token] the token object resolved by the provider chain. - * @return [AWS.TokenProviderChain] the provider, for chaining. 
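A hedged usage sketch of the callback signature documented above:

```javascript
new AWS.TokenProviderChain().resolve(function (err, token) {
  if (err) return console.error('No token could be resolved:', err);
  console.log('Token resolved; expires at', token.expireTime);
});
```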
- */ - resolve: function resolve(callback) { - var self = this; - if (self.providers.length === 0) { - callback(new Error('No providers')); - return self; - } - - if (self.resolveCallbacks.push(callback) === 1) { - var index = 0; - var providers = self.providers.slice(0); - - function resolveNext(err, token) { - if ((!err && token) || index === providers.length) { - AWS.util.arrayEach(self.resolveCallbacks, function (callback) { - callback(err, token); - }); - self.resolveCallbacks.length = 0; - return; - } - - var provider = providers[index++]; - if (typeof provider === 'function') { - token = provider.call(); - } else { - token = provider; - } - - if (token.get) { - token.get(function (getErr) { - resolveNext(getErr, getErr ? null : token); - }); - } else { - resolveNext(null, token); - } - } - - resolveNext(); - } - - return self; - } -}); - -/** - * The default set of providers used by a vanilla TokenProviderChain. - * - * In the browser: - * - * ```javascript - * AWS.TokenProviderChain.defaultProviders = [] - * ``` - * - * In Node.js: - * - * ```javascript - * AWS.TokenProviderChain.defaultProviders = [ - * function () { return new AWS.SSOTokenProvider(); }, - * ] - * ``` - */ -AWS.TokenProviderChain.defaultProviders = []; - -/** - * @api private - */ -AWS.TokenProviderChain.addPromisesToClass = function addPromisesToClass(PromiseDependency) { - this.prototype.resolvePromise = AWS.util.promisifyMethod('resolve', PromiseDependency); -}; - -/** - * @api private - */ -AWS.TokenProviderChain.deletePromisesFromClass = function deletePromisesFromClass() { - delete this.prototype.resolvePromise; -}; - -AWS.util.addPromises(AWS.TokenProviderChain); - - -/***/ }), - -/***/ 77985: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -/* eslint guard-for-in:0 */ -var AWS; - -/** - * A set of utility methods for use with the AWS SDK. - * - * @!attribute abort - * Return this value from an iterator function {each} or {arrayEach} - * to break out of the iteration. 
- * @example Breaking out of an iterator function - * AWS.util.each({a: 1, b: 2, c: 3}, function(key, value) { - * if (key == 'b') return AWS.util.abort; - * }); - * @see each - * @see arrayEach - * @api private - */ -var util = { - environment: 'nodejs', - engine: function engine() { - if (util.isBrowser() && typeof navigator !== 'undefined') { - return navigator.userAgent; - } else { - var engine = process.platform + '/' + process.version; - if (process.env.AWS_EXECUTION_ENV) { - engine += ' exec-env/' + process.env.AWS_EXECUTION_ENV; - } - return engine; - } - }, - - userAgent: function userAgent() { - var name = util.environment; - var agent = 'aws-sdk-' + name + '/' + (__nccwpck_require__(28437).VERSION); - if (name === 'nodejs') agent += ' ' + util.engine(); - return agent; - }, - - uriEscape: function uriEscape(string) { - var output = encodeURIComponent(string); - output = output.replace(/[^A-Za-z0-9_.~\-%]+/g, escape); - - // AWS percent-encodes some extra non-standard characters in a URI - output = output.replace(/[*]/g, function(ch) { - return '%' + ch.charCodeAt(0).toString(16).toUpperCase(); - }); - - return output; - }, - - uriEscapePath: function uriEscapePath(string) { - var parts = []; - util.arrayEach(string.split('/'), function (part) { - parts.push(util.uriEscape(part)); - }); - return parts.join('/'); - }, - - urlParse: function urlParse(url) { - return util.url.parse(url); - }, - - urlFormat: function urlFormat(url) { - return util.url.format(url); - }, - - queryStringParse: function queryStringParse(qs) { - return util.querystring.parse(qs); - }, - - queryParamsToString: function queryParamsToString(params) { - var items = []; - var escape = util.uriEscape; - var sortedKeys = Object.keys(params).sort(); - - util.arrayEach(sortedKeys, function(name) { - var value = params[name]; - var ename = escape(name); - var result = ename + '='; - if (Array.isArray(value)) { - var vals = []; - util.arrayEach(value, function(item) { vals.push(escape(item)); }); - result = ename + '=' + vals.sort().join('&' + ename + '='); - } else if (value !== undefined && value !== null) { - result = ename + '=' + escape(value); - } - items.push(result); - }); - - return items.join('&'); - }, - - readFileSync: function readFileSync(path) { - if (util.isBrowser()) return null; - return (__nccwpck_require__(57147).readFileSync)(path, 'utf-8'); - }, - - base64: { - encode: function encode64(string) { - if (typeof string === 'number') { - throw util.error(new Error('Cannot base64 encode number ' + string)); - } - if (string === null || typeof string === 'undefined') { - return string; - } - var buf = util.buffer.toBuffer(string); - return buf.toString('base64'); - }, - - decode: function decode64(string) { - if (typeof string === 'number') { - throw util.error(new Error('Cannot base64 decode number ' + string)); - } - if (string === null || typeof string === 'undefined') { - return string; - } - return util.buffer.toBuffer(string, 'base64'); - } - - }, - - buffer: { - /** - * Buffer constructor for Node buffer and buffer pollyfill - */ - toBuffer: function(data, encoding) { - return (typeof util.Buffer.from === 'function' && util.Buffer.from !== Uint8Array.from) ? 
- util.Buffer.from(data, encoding) : new util.Buffer(data, encoding); - }, - - alloc: function(size, fill, encoding) { - if (typeof size !== 'number') { - throw new Error('size passed to alloc must be a number.'); - } - if (typeof util.Buffer.alloc === 'function') { - return util.Buffer.alloc(size, fill, encoding); - } else { - var buf = new util.Buffer(size); - if (fill !== undefined && typeof buf.fill === 'function') { - buf.fill(fill, undefined, undefined, encoding); - } - return buf; - } - }, - - toStream: function toStream(buffer) { - if (!util.Buffer.isBuffer(buffer)) buffer = util.buffer.toBuffer(buffer); - - var readable = new (util.stream.Readable)(); - var pos = 0; - readable._read = function(size) { - if (pos >= buffer.length) return readable.push(null); - - var end = pos + size; - if (end > buffer.length) end = buffer.length; - readable.push(buffer.slice(pos, end)); - pos = end; - }; - - return readable; - }, - - /** - * Concatenates a list of Buffer objects. - */ - concat: function(buffers) { - var length = 0, - offset = 0, - buffer = null, i; - - for (i = 0; i < buffers.length; i++) { - length += buffers[i].length; - } - - buffer = util.buffer.alloc(length); - - for (i = 0; i < buffers.length; i++) { - buffers[i].copy(buffer, offset); - offset += buffers[i].length; - } - - return buffer; - } - }, - - string: { - byteLength: function byteLength(string) { - if (string === null || string === undefined) return 0; - if (typeof string === 'string') string = util.buffer.toBuffer(string); - - if (typeof string.byteLength === 'number') { - return string.byteLength; - } else if (typeof string.length === 'number') { - return string.length; - } else if (typeof string.size === 'number') { - return string.size; - } else if (typeof string.path === 'string') { - return (__nccwpck_require__(57147).lstatSync)(string.path).size; - } else { - throw util.error(new Error('Cannot determine length of ' + string), - { object: string }); - } - }, - - upperFirst: function upperFirst(string) { - return string[0].toUpperCase() + string.substr(1); - }, - - lowerFirst: function lowerFirst(string) { - return string[0].toLowerCase() + string.substr(1); - } - }, - - ini: { - parse: function string(ini) { - var currentSection, map = {}; - util.arrayEach(ini.split(/\r?\n/), function(line) { - line = line.split(/(^|\s)[;#]/)[0].trim(); // remove comments and trim - var isSection = line[0] === '[' && line[line.length - 1] === ']'; - if (isSection) { - currentSection = line.substring(1, line.length - 1); - if (currentSection === '__proto__' || currentSection.split(/\s/)[1] === '__proto__') { - throw util.error( - new Error('Cannot load profile name \'' + currentSection + '\' from shared ini file.') - ); - } - } else if (currentSection) { - var indexOfEqualsSign = line.indexOf('='); - var start = 0; - var end = line.length - 1; - var isAssignment = - indexOfEqualsSign !== -1 && indexOfEqualsSign !== start && indexOfEqualsSign !== end; - - if (isAssignment) { - var name = line.substring(0, indexOfEqualsSign).trim(); - var value = line.substring(indexOfEqualsSign + 1).trim(); - - map[currentSection] = map[currentSection] || {}; - map[currentSection][name] = value; - } - } - }); - - return map; - } - }, - - fn: { - noop: function() {}, - callback: function (err) { if (err) throw err; }, - - /** - * Turn a synchronous function into as "async" function by making it call - * a callback. The underlying function is called with all but the last argument, - * which is treated as the callback. 
The callback is passed a first argument - * of null on success to mimic standard node callbacks. - */ - makeAsync: function makeAsync(fn, expectedArgs) { - if (expectedArgs && expectedArgs <= fn.length) { - return fn; - } - - return function() { - var args = Array.prototype.slice.call(arguments, 0); - var callback = args.pop(); - var result = fn.apply(null, args); - callback(result); - }; - } - }, - - /** - * Date and time utility functions. - */ - date: { - - /** - * @return [Date] the current JavaScript date object. Since all - * AWS services rely on this date object, you can override - * this function to provide a special time value to AWS service - * requests. - */ - getDate: function getDate() { - if (!AWS) AWS = __nccwpck_require__(28437); - if (AWS.config.systemClockOffset) { // use offset when non-zero - return new Date(new Date().getTime() + AWS.config.systemClockOffset); - } else { - return new Date(); - } - }, - - /** - * @return [String] the date in ISO-8601 format - */ - iso8601: function iso8601(date) { - if (date === undefined) { date = util.date.getDate(); } - return date.toISOString().replace(/\.\d{3}Z$/, 'Z'); - }, - - /** - * @return [String] the date in RFC 822 format - */ - rfc822: function rfc822(date) { - if (date === undefined) { date = util.date.getDate(); } - return date.toUTCString(); - }, - - /** - * @return [Integer] the UNIX timestamp value for the current time - */ - unixTimestamp: function unixTimestamp(date) { - if (date === undefined) { date = util.date.getDate(); } - return date.getTime() / 1000; - }, - - /** - * @param [String,number,Date] date - * @return [Date] - */ - from: function format(date) { - if (typeof date === 'number') { - return new Date(date * 1000); // unix timestamp - } else { - return new Date(date); - } - }, - - /** - * Given a Date or date-like value, this function formats the - * date into a string of the requested value.
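`AWS.util` is internal to the SDK, so the following is purely illustrative of the formatter dispatch implemented in `format` below (expected outputs assume the documented behavior):

```javascript
AWS.util.date.format(0, 'rfc822');            // 'Thu, 01 Jan 1970 00:00:00 GMT'
AWS.util.date.format('2023-08-23T00:00:00Z'); // defaults to the 'iso8601' formatter
```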
- * @param [String,number,Date] date - * @param [String] formatter Valid formats are: - # * 'iso8601' - # * 'rfc822' - # * 'unixTimestamp' - * @return [String] - */ - format: function format(date, formatter) { - if (!formatter) formatter = 'iso8601'; - return util.date[formatter](util.date.from(date)); - }, - - parseTimestamp: function parseTimestamp(value) { - if (typeof value === 'number') { // unix timestamp (number) - return new Date(value * 1000); - } else if (value.match(/^\d+$/)) { // unix timestamp - return new Date(value * 1000); - } else if (value.match(/^\d{4}/)) { // iso8601 - return new Date(value); - } else if (value.match(/^\w{3},/)) { // rfc822 - return new Date(value); - } else { - throw util.error( - new Error('unhandled timestamp format: ' + value), - {code: 'TimestampParserError'}); - } - } - - }, - - crypto: { - crc32Table: [ - 0x00000000, 0x77073096, 0xEE0E612C, 0x990951BA, 0x076DC419, - 0x706AF48F, 0xE963A535, 0x9E6495A3, 0x0EDB8832, 0x79DCB8A4, - 0xE0D5E91E, 0x97D2D988, 0x09B64C2B, 0x7EB17CBD, 0xE7B82D07, - 0x90BF1D91, 0x1DB71064, 0x6AB020F2, 0xF3B97148, 0x84BE41DE, - 0x1ADAD47D, 0x6DDDE4EB, 0xF4D4B551, 0x83D385C7, 0x136C9856, - 0x646BA8C0, 0xFD62F97A, 0x8A65C9EC, 0x14015C4F, 0x63066CD9, - 0xFA0F3D63, 0x8D080DF5, 0x3B6E20C8, 0x4C69105E, 0xD56041E4, - 0xA2677172, 0x3C03E4D1, 0x4B04D447, 0xD20D85FD, 0xA50AB56B, - 0x35B5A8FA, 0x42B2986C, 0xDBBBC9D6, 0xACBCF940, 0x32D86CE3, - 0x45DF5C75, 0xDCD60DCF, 0xABD13D59, 0x26D930AC, 0x51DE003A, - 0xC8D75180, 0xBFD06116, 0x21B4F4B5, 0x56B3C423, 0xCFBA9599, - 0xB8BDA50F, 0x2802B89E, 0x5F058808, 0xC60CD9B2, 0xB10BE924, - 0x2F6F7C87, 0x58684C11, 0xC1611DAB, 0xB6662D3D, 0x76DC4190, - 0x01DB7106, 0x98D220BC, 0xEFD5102A, 0x71B18589, 0x06B6B51F, - 0x9FBFE4A5, 0xE8B8D433, 0x7807C9A2, 0x0F00F934, 0x9609A88E, - 0xE10E9818, 0x7F6A0DBB, 0x086D3D2D, 0x91646C97, 0xE6635C01, - 0x6B6B51F4, 0x1C6C6162, 0x856530D8, 0xF262004E, 0x6C0695ED, - 0x1B01A57B, 0x8208F4C1, 0xF50FC457, 0x65B0D9C6, 0x12B7E950, - 0x8BBEB8EA, 0xFCB9887C, 0x62DD1DDF, 0x15DA2D49, 0x8CD37CF3, - 0xFBD44C65, 0x4DB26158, 0x3AB551CE, 0xA3BC0074, 0xD4BB30E2, - 0x4ADFA541, 0x3DD895D7, 0xA4D1C46D, 0xD3D6F4FB, 0x4369E96A, - 0x346ED9FC, 0xAD678846, 0xDA60B8D0, 0x44042D73, 0x33031DE5, - 0xAA0A4C5F, 0xDD0D7CC9, 0x5005713C, 0x270241AA, 0xBE0B1010, - 0xC90C2086, 0x5768B525, 0x206F85B3, 0xB966D409, 0xCE61E49F, - 0x5EDEF90E, 0x29D9C998, 0xB0D09822, 0xC7D7A8B4, 0x59B33D17, - 0x2EB40D81, 0xB7BD5C3B, 0xC0BA6CAD, 0xEDB88320, 0x9ABFB3B6, - 0x03B6E20C, 0x74B1D29A, 0xEAD54739, 0x9DD277AF, 0x04DB2615, - 0x73DC1683, 0xE3630B12, 0x94643B84, 0x0D6D6A3E, 0x7A6A5AA8, - 0xE40ECF0B, 0x9309FF9D, 0x0A00AE27, 0x7D079EB1, 0xF00F9344, - 0x8708A3D2, 0x1E01F268, 0x6906C2FE, 0xF762575D, 0x806567CB, - 0x196C3671, 0x6E6B06E7, 0xFED41B76, 0x89D32BE0, 0x10DA7A5A, - 0x67DD4ACC, 0xF9B9DF6F, 0x8EBEEFF9, 0x17B7BE43, 0x60B08ED5, - 0xD6D6A3E8, 0xA1D1937E, 0x38D8C2C4, 0x4FDFF252, 0xD1BB67F1, - 0xA6BC5767, 0x3FB506DD, 0x48B2364B, 0xD80D2BDA, 0xAF0A1B4C, - 0x36034AF6, 0x41047A60, 0xDF60EFC3, 0xA867DF55, 0x316E8EEF, - 0x4669BE79, 0xCB61B38C, 0xBC66831A, 0x256FD2A0, 0x5268E236, - 0xCC0C7795, 0xBB0B4703, 0x220216B9, 0x5505262F, 0xC5BA3BBE, - 0xB2BD0B28, 0x2BB45A92, 0x5CB36A04, 0xC2D7FFA7, 0xB5D0CF31, - 0x2CD99E8B, 0x5BDEAE1D, 0x9B64C2B0, 0xEC63F226, 0x756AA39C, - 0x026D930A, 0x9C0906A9, 0xEB0E363F, 0x72076785, 0x05005713, - 0x95BF4A82, 0xE2B87A14, 0x7BB12BAE, 0x0CB61B38, 0x92D28E9B, - 0xE5D5BE0D, 0x7CDCEFB7, 0x0BDBDF21, 0x86D3D2D4, 0xF1D4E242, - 0x68DDB3F8, 0x1FDA836E, 0x81BE16CD, 0xF6B9265B, 0x6FB077E1, - 0x18B74777, 0x88085AE6, 
0xFF0F6A70, 0x66063BCA, 0x11010B5C, - 0x8F659EFF, 0xF862AE69, 0x616BFFD3, 0x166CCF45, 0xA00AE278, - 0xD70DD2EE, 0x4E048354, 0x3903B3C2, 0xA7672661, 0xD06016F7, - 0x4969474D, 0x3E6E77DB, 0xAED16A4A, 0xD9D65ADC, 0x40DF0B66, - 0x37D83BF0, 0xA9BCAE53, 0xDEBB9EC5, 0x47B2CF7F, 0x30B5FFE9, - 0xBDBDF21C, 0xCABAC28A, 0x53B39330, 0x24B4A3A6, 0xBAD03605, - 0xCDD70693, 0x54DE5729, 0x23D967BF, 0xB3667A2E, 0xC4614AB8, - 0x5D681B02, 0x2A6F2B94, 0xB40BBE37, 0xC30C8EA1, 0x5A05DF1B, - 0x2D02EF8D], - - crc32: function crc32(data) { - var tbl = util.crypto.crc32Table; - var crc = 0 ^ -1; - - if (typeof data === 'string') { - data = util.buffer.toBuffer(data); - } - - for (var i = 0; i < data.length; i++) { - var code = data.readUInt8(i); - crc = (crc >>> 8) ^ tbl[(crc ^ code) & 0xFF]; - } - return (crc ^ -1) >>> 0; - }, - - hmac: function hmac(key, string, digest, fn) { - if (!digest) digest = 'binary'; - if (digest === 'buffer') { digest = undefined; } - if (!fn) fn = 'sha256'; - if (typeof string === 'string') string = util.buffer.toBuffer(string); - return util.crypto.lib.createHmac(fn, key).update(string).digest(digest); - }, - - md5: function md5(data, digest, callback) { - return util.crypto.hash('md5', data, digest, callback); - }, - - sha256: function sha256(data, digest, callback) { - return util.crypto.hash('sha256', data, digest, callback); - }, - - hash: function(algorithm, data, digest, callback) { - var hash = util.crypto.createHash(algorithm); - if (!digest) { digest = 'binary'; } - if (digest === 'buffer') { digest = undefined; } - if (typeof data === 'string') data = util.buffer.toBuffer(data); - var sliceFn = util.arraySliceFn(data); - var isBuffer = util.Buffer.isBuffer(data); - //Identifying objects with an ArrayBuffer as buffers - if (util.isBrowser() && typeof ArrayBuffer !== 'undefined' && data && data.buffer instanceof ArrayBuffer) isBuffer = true; - - if (callback && typeof data === 'object' && - typeof data.on === 'function' && !isBuffer) { - data.on('data', function(chunk) { hash.update(chunk); }); - data.on('error', function(err) { callback(err); }); - data.on('end', function() { callback(null, hash.digest(digest)); }); - } else if (callback && sliceFn && !isBuffer && - typeof FileReader !== 'undefined') { - // this might be a File/Blob - var index = 0, size = 1024 * 512; - var reader = new FileReader(); - reader.onerror = function() { - callback(new Error('Failed to read data.')); - }; - reader.onload = function() { - var buf = new util.Buffer(new Uint8Array(reader.result)); - hash.update(buf); - index += buf.length; - reader._continueReading(); - }; - reader._continueReading = function() { - if (index >= data.size) { - callback(null, hash.digest(digest)); - return; - } - - var back = index + size; - if (back > data.size) back = data.size; - reader.readAsArrayBuffer(sliceFn.call(data, index, back)); - }; - - reader._continueReading(); - } else { - if (util.isBrowser() && typeof data === 'object' && !isBuffer) { - data = new util.Buffer(new Uint8Array(data)); - } - var out = hash.update(data).digest(digest); - if (callback) callback(null, out); - return out; - } - }, - - toHex: function toHex(data) { - var out = []; - for (var i = 0; i < data.length; i++) { - out.push(('0' + data.charCodeAt(i).toString(16)).substr(-2, 2)); - } - return out.join(''); - }, - - createHash: function createHash(algorithm) { - return util.crypto.lib.createHash(algorithm); - } - - }, - - /** @!ignore */ - - /* Abort constant */ - abort: {}, - - each: function each(object, iterFunction) { - for (var key in 
object) { - if (Object.prototype.hasOwnProperty.call(object, key)) { - var ret = iterFunction.call(this, key, object[key]); - if (ret === util.abort) break; - } - } - }, - - arrayEach: function arrayEach(array, iterFunction) { - for (var idx in array) { - if (Object.prototype.hasOwnProperty.call(array, idx)) { - var ret = iterFunction.call(this, array[idx], parseInt(idx, 10)); - if (ret === util.abort) break; - } - } - }, - - update: function update(obj1, obj2) { - util.each(obj2, function iterator(key, item) { - obj1[key] = item; - }); - return obj1; - }, - - merge: function merge(obj1, obj2) { - return util.update(util.copy(obj1), obj2); - }, - - copy: function copy(object) { - if (object === null || object === undefined) return object; - var dupe = {}; - // jshint forin:false - for (var key in object) { - dupe[key] = object[key]; - } - return dupe; - }, - - isEmpty: function isEmpty(obj) { - for (var prop in obj) { - if (Object.prototype.hasOwnProperty.call(obj, prop)) { - return false; - } - } - return true; - }, - - arraySliceFn: function arraySliceFn(obj) { - var fn = obj.slice || obj.webkitSlice || obj.mozSlice; - return typeof fn === 'function' ? fn : null; - }, - - isType: function isType(obj, type) { - // handle cross-"frame" objects - if (typeof type === 'function') type = util.typeName(type); - return Object.prototype.toString.call(obj) === '[object ' + type + ']'; - }, - - typeName: function typeName(type) { - if (Object.prototype.hasOwnProperty.call(type, 'name')) return type.name; - var str = type.toString(); - var match = str.match(/^\s*function (.+)\(/); - return match ? match[1] : str; - }, - - error: function error(err, options) { - var originalError = null; - if (typeof err.message === 'string' && err.message !== '') { - if (typeof options === 'string' || (options && options.message)) { - originalError = util.copy(err); - originalError.message = err.message; - } - } - err.message = err.message || null; - - if (typeof options === 'string') { - err.message = options; - } else if (typeof options === 'object' && options !== null) { - util.update(err, options); - if (options.message) - err.message = options.message; - if (options.code || options.name) - err.code = options.code || options.name; - if (options.stack) - err.stack = options.stack; - } - - if (typeof Object.defineProperty === 'function') { - Object.defineProperty(err, 'name', {writable: true, enumerable: false}); - Object.defineProperty(err, 'message', {enumerable: true}); - } - - err.name = String(options && options.name || err.name || err.code || 'Error'); - err.time = new Date(); - - if (originalError) { - err.originalError = originalError; - } - - - for (var key in options || {}) { - if (key[0] === '[' && key[key.length - 1] === ']') { - key = key.slice(1, -1); - if (key === 'code' || key === 'message') { - continue; - } - err['[' + key + ']'] = 'See error.' 
+ key + ' for details.'; - Object.defineProperty(err, key, { - value: err[key] || (options && options[key]) || (originalError && originalError[key]), - enumerable: false, - writable: true - }); - } - } - - return err; - }, - - /** - * @api private - */ - inherit: function inherit(klass, features) { - var newObject = null; - if (features === undefined) { - features = klass; - klass = Object; - newObject = {}; - } else { - var ctor = function ConstructorWrapper() {}; - ctor.prototype = klass.prototype; - newObject = new ctor(); - } - - // constructor not supplied, create pass-through ctor - if (features.constructor === Object) { - features.constructor = function() { - if (klass !== Object) { - return klass.apply(this, arguments); - } - }; - } - - features.constructor.prototype = newObject; - util.update(features.constructor.prototype, features); - features.constructor.__super__ = klass; - return features.constructor; - }, - - /** - * @api private - */ - mixin: function mixin() { - var klass = arguments[0]; - for (var i = 1; i < arguments.length; i++) { - // jshint forin:false - for (var prop in arguments[i].prototype) { - var fn = arguments[i].prototype[prop]; - if (prop !== 'constructor') { - klass.prototype[prop] = fn; - } - } - } - return klass; - }, - - /** - * @api private - */ - hideProperties: function hideProperties(obj, props) { - if (typeof Object.defineProperty !== 'function') return; - - util.arrayEach(props, function (key) { - Object.defineProperty(obj, key, { - enumerable: false, writable: true, configurable: true }); - }); - }, - - /** - * @api private - */ - property: function property(obj, name, value, enumerable, isValue) { - var opts = { - configurable: true, - enumerable: enumerable !== undefined ? enumerable : true - }; - if (typeof value === 'function' && !isValue) { - opts.get = value; - } - else { - opts.value = value; opts.writable = true; - } - - Object.defineProperty(obj, name, opts); - }, - - /** - * @api private - */ - memoizedProperty: function memoizedProperty(obj, name, get, enumerable) { - var cachedValue = null; - - // build enumerable attribute for each value with lazy accessor. - util.property(obj, name, function() { - if (cachedValue === null) { - cachedValue = get(); - } - return cachedValue; - }, enumerable); - }, - - /** - * TODO Remove in major version revision - * This backfill populates response data without the - * top-level payload name. 
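As a concrete illustration of the lazy-accessor helpers above, a minimal sketch (the `registry` object and its values are hypothetical):

    var registry = {};
    util.memoizedProperty(registry, 'endpoints', function () {
      // invoked once on the first read; the result is cached for later reads
      return { 'us-east-1': 'https://example.invalid' };
    });
    registry.endpoints === registry.endpoints;              // true - same cached object
    util.property(registry, 'region', 'us-east-1', false);  // plain, non-enumerable value
    Object.keys(registry);                                   // ['endpoints']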
- * - * @api private - */ - hoistPayloadMember: function hoistPayloadMember(resp) { - var req = resp.request; - var operationName = req.operation; - var operation = req.service.api.operations[operationName]; - var output = operation.output; - if (output.payload && !operation.hasEventOutput) { - var payloadMember = output.members[output.payload]; - var responsePayload = resp.data[output.payload]; - if (payloadMember.type === 'structure') { - util.each(responsePayload, function(key, value) { - util.property(resp.data, key, value, false); - }); - } - } - }, - - /** - * Compute SHA-256 checksums of streams - * - * @api private - */ - computeSha256: function computeSha256(body, done) { - if (util.isNode()) { - var Stream = util.stream.Stream; - var fs = __nccwpck_require__(57147); - if (typeof Stream === 'function' && body instanceof Stream) { - if (typeof body.path === 'string') { // assume file object - var settings = {}; - if (typeof body.start === 'number') { - settings.start = body.start; - } - if (typeof body.end === 'number') { - settings.end = body.end; - } - body = fs.createReadStream(body.path, settings); - } else { // TODO support other stream types - return done(new Error('Non-file stream objects are ' + - 'not supported with SigV4')); - } - } - } - - util.crypto.sha256(body, 'hex', function(err, sha) { - if (err) done(err); - else done(null, sha); - }); - }, - - /** - * @api private - */ - isClockSkewed: function isClockSkewed(serverTime) { - if (serverTime) { - util.property(AWS.config, 'isClockSkewed', - Math.abs(new Date().getTime() - serverTime) >= 300000, false); - return AWS.config.isClockSkewed; - } - }, - - applyClockOffset: function applyClockOffset(serverTime) { - if (serverTime) - AWS.config.systemClockOffset = serverTime - new Date().getTime(); - }, - - /** - * @api private - */ - extractRequestId: function extractRequestId(resp) { - var requestId = resp.httpResponse.headers['x-amz-request-id'] || - resp.httpResponse.headers['x-amzn-requestid']; - - if (!requestId && resp.data && resp.data.ResponseMetadata) { - requestId = resp.data.ResponseMetadata.RequestId; - } - - if (requestId) { - resp.requestId = requestId; - } - - if (resp.error) { - resp.error.requestId = requestId; - } - }, - - /** - * @api private - */ - addPromises: function addPromises(constructors, PromiseDependency) { - var deletePromises = false; - if (PromiseDependency === undefined && AWS && AWS.config) { - PromiseDependency = AWS.config.getPromisesDependency(); - } - if (PromiseDependency === undefined && typeof Promise !== 'undefined') { - PromiseDependency = Promise; - } - if (typeof PromiseDependency !== 'function') deletePromises = true; - if (!Array.isArray(constructors)) constructors = [constructors]; - - for (var ind = 0; ind < constructors.length; ind++) { - var constructor = constructors[ind]; - if (deletePromises) { - if (constructor.deletePromisesFromClass) { - constructor.deletePromisesFromClass(); - } - } else if (constructor.addPromisesToClass) { - constructor.addPromisesToClass(PromiseDependency); - } - } - }, - - /** - * @api private - * Return a function that will return a promise whose fate is decided by the - * callback behavior of the given method with `methodName`. The method to be - * promisified should conform to node.js convention of accepting a callback as - * last argument and calling that callback with error as the first argument - * and success value on the second argument. 
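A sketch of the wrapping described above, assuming `util` is this module and using a hypothetical `getItem` method that follows the node callback convention:

    function Client() {}
    Client.prototype.getItem = function (id, callback) {
      setImmediate(function () { callback(null, { id: id }); });  // node-style callback
    };
    Client.prototype.getItemPromise = util.promisifyMethod('getItem', Promise);
    new Client().getItemPromise(42).then(function (data) {
      console.log(data.id);  // 42 - resolve value is the callback's second argument
    }, function (err) {
      console.error(err);    // rejection carries the callback's error argument
    });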
- */ - promisifyMethod: function promisifyMethod(methodName, PromiseDependency) { - return function promise() { - var self = this; - var args = Array.prototype.slice.call(arguments); - return new PromiseDependency(function(resolve, reject) { - args.push(function(err, data) { - if (err) { - reject(err); - } else { - resolve(data); - } - }); - self[methodName].apply(self, args); - }); - }; - }, - - /** - * @api private - */ - isDualstackAvailable: function isDualstackAvailable(service) { - if (!service) return false; - var metadata = __nccwpck_require__(17752); - if (typeof service !== 'string') service = service.serviceIdentifier; - if (typeof service !== 'string' || !metadata.hasOwnProperty(service)) return false; - return !!metadata[service].dualstackAvailable; - }, - - /** - * @api private - */ - calculateRetryDelay: function calculateRetryDelay(retryCount, retryDelayOptions, err) { - if (!retryDelayOptions) retryDelayOptions = {}; - var customBackoff = retryDelayOptions.customBackoff || null; - if (typeof customBackoff === 'function') { - return customBackoff(retryCount, err); - } - var base = typeof retryDelayOptions.base === 'number' ? retryDelayOptions.base : 100; - var delay = Math.random() * (Math.pow(2, retryCount) * base); - return delay; - }, - - /** - * @api private - */ - handleRequestWithRetries: function handleRequestWithRetries(httpRequest, options, cb) { - if (!options) options = {}; - var http = AWS.HttpClient.getInstance(); - var httpOptions = options.httpOptions || {}; - var retryCount = 0; - - var errCallback = function(err) { - var maxRetries = options.maxRetries || 0; - if (err && err.code === 'TimeoutError') err.retryable = true; - - // Call `calculateRetryDelay()` only when relevant, see #3401 - if (err && err.retryable && retryCount < maxRetries) { - var delay = util.calculateRetryDelay(retryCount, options.retryDelayOptions, err); - if (delay >= 0) { - retryCount++; - setTimeout(sendRequest, delay + (err.retryAfter || 0)); - return; - } - } - cb(err); - }; - - var sendRequest = function() { - var data = ''; - http.handleRequest(httpRequest, httpOptions, function(httpResponse) { - httpResponse.on('data', function(chunk) { data += chunk.toString(); }); - httpResponse.on('end', function() { - var statusCode = httpResponse.statusCode; - if (statusCode < 300) { - cb(null, data); - } else { - var retryAfter = parseInt(httpResponse.headers['retry-after'], 10) * 1000 || 0; - var err = util.error(new Error(), - { - statusCode: statusCode, - retryable: statusCode >= 500 || statusCode === 429 - } - ); - if (retryAfter && err.retryable) err.retryAfter = retryAfter; - errCallback(err); - } - }); - }, errCallback); - }; - - AWS.util.defer(sendRequest); - }, - - /** - * @api private - */ - uuid: { - v4: function uuidV4() { - return (__nccwpck_require__(57821).v4)(); - } - }, - - /** - * @api private - */ - convertPayloadToString: function convertPayloadToString(resp) { - var req = resp.request; - var operation = req.operation; - var rules = req.service.api.operations[operation].output || {}; - if (rules.payload && resp.data[rules.payload]) { - resp.data[rules.payload] = resp.data[rules.payload].toString(); - } - }, - - /** - * @api private - */ - defer: function defer(callback) { - if (typeof process === 'object' && typeof process.nextTick === 'function') { - process.nextTick(callback); - } else if (typeof setImmediate === 'function') { - setImmediate(callback); - } else { - setTimeout(callback, 0); - } - }, - - /** - * @api private - */ - getRequestPayloadShape: function 
getRequestPayloadShape(req) { - var operations = req.service.api.operations; - if (!operations) return undefined; - var operation = (operations || {})[req.operation]; - if (!operation || !operation.input || !operation.input.payload) return undefined; - return operation.input.members[operation.input.payload]; - }, - - getProfilesFromSharedConfig: function getProfilesFromSharedConfig(iniLoader, filename) { - var profiles = {}; - var profilesFromConfig = {}; - if (process.env[util.configOptInEnv]) { - var profilesFromConfig = iniLoader.loadFrom({ - isConfig: true, - filename: process.env[util.sharedConfigFileEnv] - }); - } - var profilesFromCreds= {}; - try { - var profilesFromCreds = iniLoader.loadFrom({ - filename: filename || - (process.env[util.configOptInEnv] && process.env[util.sharedCredentialsFileEnv]) - }); - } catch (error) { - // if using config, assume it is fully descriptive without a credentials file: - if (!process.env[util.configOptInEnv]) throw error; - } - for (var i = 0, profileNames = Object.keys(profilesFromConfig); i < profileNames.length; i++) { - profiles[profileNames[i]] = objectAssign(profiles[profileNames[i]] || {}, profilesFromConfig[profileNames[i]]); - } - for (var i = 0, profileNames = Object.keys(profilesFromCreds); i < profileNames.length; i++) { - profiles[profileNames[i]] = objectAssign(profiles[profileNames[i]] || {}, profilesFromCreds[profileNames[i]]); - } - return profiles; - - /** - * Roughly the semantics of `Object.assign(target, source)` - */ - function objectAssign(target, source) { - for (var i = 0, keys = Object.keys(source); i < keys.length; i++) { - target[keys[i]] = source[keys[i]]; - } - return target; - } - }, - - /** - * @api private - */ - ARN: { - validate: function validateARN(str) { - return str && str.indexOf('arn:') === 0 && str.split(':').length >= 6; - }, - parse: function parseARN(arn) { - var matched = arn.split(':'); - return { - partition: matched[1], - service: matched[2], - region: matched[3], - accountId: matched[4], - resource: matched.slice(5).join(':') - }; - }, - build: function buildARN(arnObject) { - if ( - arnObject.service === undefined || - arnObject.region === undefined || - arnObject.accountId === undefined || - arnObject.resource === undefined - ) throw util.error(new Error('Input ARN object is invalid')); - return 'arn:'+ (arnObject.partition || 'aws') + ':' + arnObject.service + - ':' + arnObject.region + ':' + arnObject.accountId + ':' + arnObject.resource; - } - }, - - /** - * @api private - */ - defaultProfile: 'default', - - /** - * @api private - */ - configOptInEnv: 'AWS_SDK_LOAD_CONFIG', - - /** - * @api private - */ - sharedCredentialsFileEnv: 'AWS_SHARED_CREDENTIALS_FILE', - - /** - * @api private - */ - sharedConfigFileEnv: 'AWS_CONFIG_FILE', - - /** - * @api private - */ - imdsDisabledEnv: 'AWS_EC2_METADATA_DISABLED' -}; - -/** - * @api private - */ -module.exports = util; - - -/***/ }), - -/***/ 23546: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var util = __nccwpck_require__(77985); -var XmlNode = (__nccwpck_require__(20397).XmlNode); -var XmlText = (__nccwpck_require__(90971).XmlText); - -function XmlBuilder() { } - -XmlBuilder.prototype.toXML = function(params, shape, rootElement, noEmpty) { - var xml = new XmlNode(rootElement); - applyNamespaces(xml, shape, true); - serialize(xml, params, shape); - return xml.children.length > 0 || noEmpty ? 
xml.toString() : ''; -}; - -function serialize(xml, value, shape) { - switch (shape.type) { - case 'structure': return serializeStructure(xml, value, shape); - case 'map': return serializeMap(xml, value, shape); - case 'list': return serializeList(xml, value, shape); - default: return serializeScalar(xml, value, shape); - } -} - -function serializeStructure(xml, params, shape) { - util.arrayEach(shape.memberNames, function(memberName) { - var memberShape = shape.members[memberName]; - if (memberShape.location !== 'body') return; - - var value = params[memberName]; - var name = memberShape.name; - if (value !== undefined && value !== null) { - if (memberShape.isXmlAttribute) { - xml.addAttribute(name, value); - } else if (memberShape.flattened) { - serialize(xml, value, memberShape); - } else { - var element = new XmlNode(name); - xml.addChildNode(element); - applyNamespaces(element, memberShape); - serialize(element, value, memberShape); - } - } - }); -} - -function serializeMap(xml, map, shape) { - var xmlKey = shape.key.name || 'key'; - var xmlValue = shape.value.name || 'value'; - - util.each(map, function(key, value) { - var entry = new XmlNode(shape.flattened ? shape.name : 'entry'); - xml.addChildNode(entry); - - var entryKey = new XmlNode(xmlKey); - var entryValue = new XmlNode(xmlValue); - entry.addChildNode(entryKey); - entry.addChildNode(entryValue); - - serialize(entryKey, key, shape.key); - serialize(entryValue, value, shape.value); - }); -} - -function serializeList(xml, list, shape) { - if (shape.flattened) { - util.arrayEach(list, function(value) { - var name = shape.member.name || shape.name; - var element = new XmlNode(name); - xml.addChildNode(element); - serialize(element, value, shape.member); - }); - } else { - util.arrayEach(list, function(value) { - var name = shape.member.name || 'member'; - var element = new XmlNode(name); - xml.addChildNode(element); - serialize(element, value, shape.member); - }); - } -} - -function serializeScalar(xml, value, shape) { - xml.addChildNode( - new XmlText(shape.toWireFormat(value)) - ); -} - -function applyNamespaces(xml, shape, isRoot) { - var uri, prefix = 'xmlns'; - if (shape.xmlNamespaceUri) { - uri = shape.xmlNamespaceUri; - if (shape.xmlNamespacePrefix) prefix += ':' + shape.xmlNamespacePrefix; - } else if (isRoot && shape.api.xmlNamespaceUri) { - uri = shape.api.xmlNamespaceUri; - } - - if (uri) xml.addAttribute(prefix, uri); -} - -/** - * @api private - */ -module.exports = XmlBuilder; - - -/***/ }), - -/***/ 98241: -/***/ ((module) => { - -/** - * Escapes characters that can not be in an XML attribute. - */ -function escapeAttribute(value) { - return value.replace(/&/g, '&amp;').replace(/'/g, '&apos;').replace(/</g, '&lt;').replace(/>/g, '&gt;').replace(/"/g, '&quot;'); -} - -/** - * @api private - */ -module.exports = { - escapeAttribute: escapeAttribute -}; - - -/***/ }), - -/***/ 98464: -/***/ ((module) => { - -/** - * Escapes characters that can not be in an XML element. - */ -function escapeElement(value) { - return value.replace(/&/g, '&amp;') - .replace(/</g, '&lt;') - .replace(/>/g, '&gt;') - .replace(/\r/g, '&#x0D;') - .replace(/\n/g, '&#x0A;') - .replace(/\u0085/g, '&#x85;') - .replace(/\u2028/, '&#x2028;
'); -} - -/** - * @api private - */ -module.exports = { - escapeElement: escapeElement -}; - - -/***/ }), - -/***/ 96752: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var AWS = __nccwpck_require__(28437); -var util = AWS.util; -var Shape = AWS.Model.Shape; - -var xml2js = __nccwpck_require__(66189); - -/** - * @api private - */ -var options = { // options passed to xml2js parser - explicitCharkey: false, // undocumented - trim: false, // trim the leading/trailing whitespace from text nodes - normalize: false, // trim interior whitespace inside text nodes - explicitRoot: false, // return the root node in the resulting object? - emptyTag: null, // the default value for empty nodes - explicitArray: true, // always put child nodes in an array - ignoreAttrs: false, // ignore attributes, only create text nodes - mergeAttrs: false, // merge attributes and child elements - validator: null // a callable validator -}; - -function NodeXmlParser() { } - -NodeXmlParser.prototype.parse = function(xml, shape) { - shape = shape || {}; - - var result = null; - var error = null; - - var parser = new xml2js.Parser(options); - parser.parseString(xml, function (e, r) { - error = e; - result = r; - }); - - if (result) { - var data = parseXml(result, shape); - if (result.ResponseMetadata) { - data.ResponseMetadata = parseXml(result.ResponseMetadata[0], {}); - } - return data; - } else if (error) { - throw util.error(error, {code: 'XMLParserError', retryable: true}); - } else { // empty xml document - return parseXml({}, shape); - } -}; - -function parseXml(xml, shape) { - switch (shape.type) { - case 'structure': return parseStructure(xml, shape); - case 'map': return parseMap(xml, shape); - case 'list': return parseList(xml, shape); - case undefined: case null: return parseUnknown(xml); - default: return parseScalar(xml, shape); - } -} - -function parseStructure(xml, shape) { - var data = {}; - if (xml === null) return data; - - util.each(shape.members, function(memberName, memberShape) { - var xmlName = memberShape.name; - if (Object.prototype.hasOwnProperty.call(xml, xmlName) && Array.isArray(xml[xmlName])) { - var xmlChild = xml[xmlName]; - if (!memberShape.flattened) xmlChild = xmlChild[0]; - - data[memberName] = parseXml(xmlChild, memberShape); - } else if (memberShape.isXmlAttribute && - xml.$ && Object.prototype.hasOwnProperty.call(xml.$, xmlName)) { - data[memberName] = parseScalar(xml.$[xmlName], memberShape); - } else if (memberShape.type === 'list' && !shape.api.xmlNoDefaultLists) { - data[memberName] = memberShape.defaultValue; - } - }); - - return data; -} - -function parseMap(xml, shape) { - var data = {}; - if (xml === null) return data; - - var xmlKey = shape.key.name || 'key'; - var xmlValue = shape.value.name || 'value'; - var iterable = shape.flattened ? 
xml : xml.entry; - - if (Array.isArray(iterable)) { - util.arrayEach(iterable, function(child) { - data[child[xmlKey][0]] = parseXml(child[xmlValue][0], shape.value); - }); - } - - return data; -} - -function parseList(xml, shape) { - var data = []; - var name = shape.member.name || 'member'; - if (shape.flattened) { - util.arrayEach(xml, function(xmlChild) { - data.push(parseXml(xmlChild, shape.member)); - }); - } else if (xml && Array.isArray(xml[name])) { - util.arrayEach(xml[name], function(child) { - data.push(parseXml(child, shape.member)); - }); - } - - return data; -} - -function parseScalar(text, shape) { - if (text && text.$ && text.$.encoding === 'base64') { - shape = new Shape.create({type: text.$.encoding}); - } - if (text && text._) text = text._; - - if (typeof shape.toType === 'function') { - return shape.toType(text); - } else { - return text; - } -} - -function parseUnknown(xml) { - if (xml === undefined || xml === null) return ''; - if (typeof xml === 'string') return xml; - - // parse a list - if (Array.isArray(xml)) { - var arr = []; - for (i = 0; i < xml.length; i++) { - arr.push(parseXml(xml[i], {})); - } - return arr; - } - - // empty object - var keys = Object.keys(xml), i; - if (keys.length === 0 || (keys.length === 1 && keys[0] === '$')) { - return {}; - } - - // object, parse as structure - var data = {}; - for (i = 0; i < keys.length; i++) { - var key = keys[i], value = xml[key]; - if (key === '$') continue; - if (value.length > 1) { // this member is a list - data[key] = parseList(value, {member: {}}); - } else { // this member is a single item - data[key] = parseXml(value[0], {}); - } - } - return data; -} - -/** - * @api private - */ -module.exports = NodeXmlParser; - - -/***/ }), - -/***/ 20397: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var escapeAttribute = (__nccwpck_require__(98241).escapeAttribute); - -/** - * Represents an XML node. - * @api private - */ -function XmlNode(name, children) { - if (children === void 0) { children = []; } - this.name = name; - this.children = children; - this.attributes = {}; -} -XmlNode.prototype.addAttribute = function (name, value) { - this.attributes[name] = value; - return this; -}; -XmlNode.prototype.addChildNode = function (child) { - this.children.push(child); - return this; -}; -XmlNode.prototype.removeAttribute = function (name) { - delete this.attributes[name]; - return this; -}; -XmlNode.prototype.toString = function () { - var hasChildren = Boolean(this.children.length); - var xmlText = '<' + this.name; - // add attributes - var attributes = this.attributes; - for (var i = 0, attributeNames = Object.keys(attributes); i < attributeNames.length; i++) { - var attributeName = attributeNames[i]; - var attribute = attributes[attributeName]; - if (typeof attribute !== 'undefined' && attribute !== null) { - xmlText += ' ' + attributeName + '=\"' + escapeAttribute('' + attribute) + '\"'; - } - } - return xmlText += !hasChildren ? '/>' : '>' + this.children.map(function (c) { return c.toString(); }).join('') + '</' + this.name + '>'; -}; - -/** - * @api private - */ -module.exports = { - XmlNode: XmlNode -}; - - -/***/ }), - -/***/ 90971: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var escapeElement = (__nccwpck_require__(98464).escapeElement); - -/** - * Represents an XML text value.
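A small usage sketch of the XmlNode/XmlText pair defined in this chunk (the tag name and namespace URI are made up):

    var node = new XmlNode('Grant');
    node.addAttribute('xmlns', 'http://example.invalid/doc/2006-03-01/');
    node.addChildNode(new XmlText('READ'));
    node.toString();   // roughly '<Grant xmlns="http://example.invalid/doc/2006-03-01/">READ</Grant>'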
- * @api private - */ -function XmlText(value) { - this.value = value; -} - -XmlText.prototype.toString = function () { - return escapeElement('' + this.value); -}; - -/** - * @api private - */ -module.exports = { - XmlText: XmlText -}; - - -/***/ }), - -/***/ 35827: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -/** - * Convert array of 16 byte values to UUID string format of the form: - * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX - */ -var byteToHex = []; - -for (var i = 0; i < 256; ++i) { - byteToHex[i] = (i + 0x100).toString(16).substr(1); -} - -function bytesToUuid(buf, offset) { - var i = offset || 0; - var bth = byteToHex; // join used to fix memory issue caused by concatenation: https://bugs.chromium.org/p/v8/issues/detail?id=3175#c4 - - return [bth[buf[i++]], bth[buf[i++]], bth[buf[i++]], bth[buf[i++]], '-', bth[buf[i++]], bth[buf[i++]], '-', bth[buf[i++]], bth[buf[i++]], '-', bth[buf[i++]], bth[buf[i++]], '-', bth[buf[i++]], bth[buf[i++]], bth[buf[i++]], bth[buf[i++]], bth[buf[i++]], bth[buf[i++]]].join(''); -} - -var _default = bytesToUuid; -exports["default"] = _default; - -/***/ }), - -/***/ 57821: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; -var __webpack_unused_export__; - - -__webpack_unused_export__ = ({ - value: true -}); -__webpack_unused_export__ = ({ - enumerable: true, - get: function () { - return _v.default; - } -}); -__webpack_unused_export__ = ({ - enumerable: true, - get: function () { - return _v2.default; - } -}); -Object.defineProperty(exports, "v4", ({ - enumerable: true, - get: function () { - return _v3.default; - } -})); -__webpack_unused_export__ = ({ - enumerable: true, - get: function () { - return _v4.default; - } -}); - -var _v = _interopRequireDefault(__nccwpck_require__(67668)); - -var _v2 = _interopRequireDefault(__nccwpck_require__(98573)); - -var _v3 = _interopRequireDefault(__nccwpck_require__(7811)); - -var _v4 = _interopRequireDefault(__nccwpck_require__(46508)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/***/ }), - -/***/ 93525: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -function md5(bytes) { - if (Array.isArray(bytes)) { - bytes = Buffer.from(bytes); - } else if (typeof bytes === 'string') { - bytes = Buffer.from(bytes, 'utf8'); - } - - return _crypto.default.createHash('md5').update(bytes).digest(); -} - -var _default = md5; -exports["default"] = _default; - -/***/ }), - -/***/ 49788: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = rng; - -var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } - -function rng() { - return _crypto.default.randomBytes(16); -} - -/***/ }), - -/***/ 7387: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -function sha1(bytes) { - if (Array.isArray(bytes)) { - bytes = Buffer.from(bytes); - } else if (typeof bytes === 'string') { - bytes = Buffer.from(bytes, 'utf8'); - } - - return _crypto.default.createHash('sha1').update(bytes).digest(); -} - -var _default = sha1; -exports["default"] = _default; - -/***/ }), - -/***/ 67668: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _rng = _interopRequireDefault(__nccwpck_require__(49788)); - -var _bytesToUuid = _interopRequireDefault(__nccwpck_require__(35827)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -// **`v1()` - Generate time-based UUID** -// -// Inspired by https://github.com/LiosK/UUID.js -// and http://docs.python.org/library/uuid.html -var _nodeId; - -var _clockseq; // Previous uuid creation time - - -var _lastMSecs = 0; -var _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details - -function v1(options, buf, offset) { - var i = buf && offset || 0; - var b = buf || []; - options = options || {}; - var node = options.node || _nodeId; - var clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not - // specified. We do this lazily to minimize issues related to insufficient - // system entropy. See #189 - - if (node == null || clockseq == null) { - var seedBytes = options.random || (options.rng || _rng.default)(); - - if (node == null) { - // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) - node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; - } - - if (clockseq == null) { - // Per 4.2.2, randomize (14 bit) clockseq - clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; - } - } // UUID timestamps are 100 nano-second units since the Gregorian epoch, - // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so - // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' - // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. - - - var msecs = options.msecs !== undefined ? options.msecs : new Date().getTime(); // Per 4.2.1.2, use count of uuid's generated during the current clock - // cycle to simulate higher resolution clock - - var nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) - - var dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression - - if (dt < 0 && options.clockseq === undefined) { - clockseq = clockseq + 1 & 0x3fff; - } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new - // time interval - - - if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { - nsecs = 0; - } // Per 4.2.1.2 Throw error if too many uuids are requested - - - if (nsecs >= 10000) { - throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); - } - - _lastMSecs = msecs; - _lastNSecs = nsecs; - _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch - - msecs += 12219292800000; // `time_low` - - var tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; - b[i++] = tl >>> 24 & 0xff; - b[i++] = tl >>> 16 & 0xff; - b[i++] = tl >>> 8 & 0xff; - b[i++] = tl & 0xff; // `time_mid` - - var tmh = msecs / 0x100000000 * 10000 & 0xfffffff; - b[i++] = tmh >>> 8 & 0xff; - b[i++] = tmh & 0xff; // `time_high_and_version` - - b[i++] = tmh >>> 24 & 0xf | 0x10; // include version - - b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) - - b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` - - b[i++] = clockseq & 0xff; // `node` - - for (var n = 0; n < 6; ++n) { - b[i + n] = node[n]; - } - - return buf ? buf : (0, _bytesToUuid.default)(b); -} - -var _default = v1; -exports["default"] = _default; - -/***/ }), - -/***/ 98573: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _v = _interopRequireDefault(__nccwpck_require__(36097)); - -var _md = _interopRequireDefault(__nccwpck_require__(93525)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -const v3 = (0, _v.default)('v3', 0x30, _md.default); -var _default = v3; -exports["default"] = _default; - -/***/ }), - -/***/ 36097: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = _default; -exports.URL = exports.DNS = void 0; - -var _bytesToUuid = _interopRequireDefault(__nccwpck_require__(35827)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } - -function uuidToBytes(uuid) { - // Note: We assume we're being passed a valid uuid string - var bytes = []; - uuid.replace(/[a-fA-F0-9]{2}/g, function (hex) { - bytes.push(parseInt(hex, 16)); - }); - return bytes; -} - -function stringToBytes(str) { - str = unescape(encodeURIComponent(str)); // UTF8 escape - - var bytes = new Array(str.length); - - for (var i = 0; i < str.length; i++) { - bytes[i] = str.charCodeAt(i); - } - - return bytes; -} - -const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; -exports.DNS = DNS; -const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; -exports.URL = URL; - -function _default(name, version, hashfunc) { - var generateUUID = function (value, namespace, buf, offset) { - var off = buf && offset || 0; - if (typeof value == 'string') value = stringToBytes(value); - if (typeof namespace == 'string') namespace = uuidToBytes(namespace); - if (!Array.isArray(value)) throw TypeError('value must be an array of bytes'); - if (!Array.isArray(namespace) || namespace.length !== 16) throw TypeError('namespace must be uuid string or an Array of 16 byte values'); // Per 4.3 - - var bytes = hashfunc(namespace.concat(value)); - bytes[6] = bytes[6] & 0x0f | version; - bytes[8] = bytes[8] & 0x3f | 0x80; - - if (buf) { - for (var idx = 0; idx < 16; ++idx) { - buf[off + idx] = bytes[idx]; - } - } - - return buf || (0, _bytesToUuid.default)(bytes); - }; // Function#name is not settable on some platforms (#270) - - - try { - generateUUID.name = name; - } catch (err) {} // For CommonJS default export support - - - generateUUID.DNS = DNS; - generateUUID.URL = URL; - return generateUUID; -} - -/***/ }), - -/***/ 7811: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _rng = _interopRequireDefault(__nccwpck_require__(49788)); - -var _bytesToUuid = _interopRequireDefault(__nccwpck_require__(35827)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -function v4(options, buf, offset) { - var i = buf && offset || 0; - - if (typeof options == 'string') { - buf = options === 'binary' ? new Array(16) : null; - options = null; - } - - options = options || {}; - - var rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` - - - rnds[6] = rnds[6] & 0x0f | 0x40; - rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided - - if (buf) { - for (var ii = 0; ii < 16; ++ii) { - buf[i + ii] = rnds[ii]; - } - } - - return buf || (0, _bytesToUuid.default)(rnds); -} - -var _default = v4; -exports["default"] = _default; - -/***/ }), - -/***/ 46508: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _v = _interopRequireDefault(__nccwpck_require__(36097)); - -var _sha = _interopRequireDefault(__nccwpck_require__(7387)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } - -const v5 = (0, _v.default)('v5', 0x50, _sha.default); -var _default = v5; -exports["default"] = _default; - -/***/ }), - -/***/ 96323: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; -var __webpack_unused_export__; - -__webpack_unused_export__ = ({ value: true }); -var LRU_1 = __nccwpck_require__(77710); -var CACHE_SIZE = 1000; -/** - * Inspired node-lru-cache[https://github.com/isaacs/node-lru-cache] - */ -var EndpointCache = /** @class */ (function () { - function EndpointCache(maxSize) { - if (maxSize === void 0) { maxSize = CACHE_SIZE; } - this.maxSize = maxSize; - this.cache = new LRU_1.LRUCache(maxSize); - } - ; - Object.defineProperty(EndpointCache.prototype, "size", { - get: function () { - return this.cache.length; - }, - enumerable: true, - configurable: true - }); - EndpointCache.prototype.put = function (key, value) { - var keyString = typeof key !== 'string' ? EndpointCache.getKeyString(key) : key; - var endpointRecord = this.populateValue(value); - this.cache.put(keyString, endpointRecord); - }; - EndpointCache.prototype.get = function (key) { - var keyString = typeof key !== 'string' ? EndpointCache.getKeyString(key) : key; - var now = Date.now(); - var records = this.cache.get(keyString); - if (records) { - for (var i = records.length-1; i >= 0; i--) { - var record = records[i]; - if (record.Expire < now) { - records.splice(i, 1); - } - } - if (records.length === 0) { - this.cache.remove(keyString); - return undefined; - } - } - return records; - }; - EndpointCache.getKeyString = function (key) { - var identifiers = []; - var identifierNames = Object.keys(key).sort(); - for (var i = 0; i < identifierNames.length; i++) { - var identifierName = identifierNames[i]; - if (key[identifierName] === undefined) - continue; - identifiers.push(key[identifierName]); - } - return identifiers.join(' '); - }; - EndpointCache.prototype.populateValue = function (endpoints) { - var now = Date.now(); - return endpoints.map(function (endpoint) { return ({ - Address: endpoint.Address || '', - Expire: now + (endpoint.CachePeriodInMinutes || 1) * 60 * 1000 - }); }); - }; - EndpointCache.prototype.empty = function () { - this.cache.empty(); - }; - EndpointCache.prototype.remove = function (key) { - var keyString = typeof key !== 'string' ? 
EndpointCache.getKeyString(key) : key; - this.cache.remove(keyString); - }; - return EndpointCache; -}()); -exports.$ = EndpointCache; - -/***/ }), - -/***/ 77710: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -var LinkedListNode = /** @class */ (function () { - function LinkedListNode(key, value) { - this.key = key; - this.value = value; - } - return LinkedListNode; -}()); -var LRUCache = /** @class */ (function () { - function LRUCache(size) { - this.nodeMap = {}; - this.size = 0; - if (typeof size !== 'number' || size < 1) { - throw new Error('Cache size can only be positive number'); - } - this.sizeLimit = size; - } - Object.defineProperty(LRUCache.prototype, "length", { - get: function () { - return this.size; - }, - enumerable: true, - configurable: true - }); - LRUCache.prototype.prependToList = function (node) { - if (!this.headerNode) { - this.tailNode = node; - } - else { - this.headerNode.prev = node; - node.next = this.headerNode; - } - this.headerNode = node; - this.size++; - }; - LRUCache.prototype.removeFromTail = function () { - if (!this.tailNode) { - return undefined; - } - var node = this.tailNode; - var prevNode = node.prev; - if (prevNode) { - prevNode.next = undefined; - } - node.prev = undefined; - this.tailNode = prevNode; - this.size--; - return node; - }; - LRUCache.prototype.detachFromList = function (node) { - if (this.headerNode === node) { - this.headerNode = node.next; - } - if (this.tailNode === node) { - this.tailNode = node.prev; - } - if (node.prev) { - node.prev.next = node.next; - } - if (node.next) { - node.next.prev = node.prev; - } - node.next = undefined; - node.prev = undefined; - this.size--; - }; - LRUCache.prototype.get = function (key) { - if (this.nodeMap[key]) { - var node = this.nodeMap[key]; - this.detachFromList(node); - this.prependToList(node); - return node.value; - } - }; - LRUCache.prototype.remove = function (key) { - if (this.nodeMap[key]) { - var node = this.nodeMap[key]; - this.detachFromList(node); - delete this.nodeMap[key]; - } - }; - LRUCache.prototype.put = function (key, value) { - if (this.nodeMap[key]) { - this.remove(key); - } - else if (this.size === this.sizeLimit) { - var tailNode = this.removeFromTail(); - var key_1 = tailNode.key; - delete this.nodeMap[key_1]; - } - var newNode = new LinkedListNode(key, value); - this.nodeMap[key] = newNode; - this.prependToList(newNode); - }; - LRUCache.prototype.empty = function () { - var keys = Object.keys(this.nodeMap); - for (var i = 0; i < keys.length; i++) { - var key = keys[i]; - var node = this.nodeMap[key]; - this.detachFromList(node); - delete this.nodeMap[key]; - } - }; - return LRUCache; -}()); -exports.LRUCache = LRUCache; - -/***/ }), - -/***/ 28222: -/***/ ((module, exports, __nccwpck_require__) => { - -/* eslint-env browser */ - -/** - * This is the web browser implementation of `debug()`. - */ - -exports.formatArgs = formatArgs; -exports.save = save; -exports.load = load; -exports.useColors = useColors; -exports.storage = localstorage(); -exports.destroy = (() => { - let warned = false; - - return () => { - if (!warned) { - warned = true; - console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'); - } - }; -})(); - -/** - * Colors. 
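A quick sketch of the LRU behaviour implemented by LRUCache above (capacity and keys are illustrative):

    var cache = new LRUCache(2);
    cache.put('a', 1);
    cache.put('b', 2);
    cache.get('a');     // 1 - 'a' becomes most recently used
    cache.put('c', 3);  // at capacity, so the least recently used entry ('b') is evicted
    cache.get('b');     // undefined
    cache.length;       // 2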
- */ - -exports.colors = [ - '#0000CC', - '#0000FF', - '#0033CC', - '#0033FF', - '#0066CC', - '#0066FF', - '#0099CC', - '#0099FF', - '#00CC00', - '#00CC33', - '#00CC66', - '#00CC99', - '#00CCCC', - '#00CCFF', - '#3300CC', - '#3300FF', - '#3333CC', - '#3333FF', - '#3366CC', - '#3366FF', - '#3399CC', - '#3399FF', - '#33CC00', - '#33CC33', - '#33CC66', - '#33CC99', - '#33CCCC', - '#33CCFF', - '#6600CC', - '#6600FF', - '#6633CC', - '#6633FF', - '#66CC00', - '#66CC33', - '#9900CC', - '#9900FF', - '#9933CC', - '#9933FF', - '#99CC00', - '#99CC33', - '#CC0000', - '#CC0033', - '#CC0066', - '#CC0099', - '#CC00CC', - '#CC00FF', - '#CC3300', - '#CC3333', - '#CC3366', - '#CC3399', - '#CC33CC', - '#CC33FF', - '#CC6600', - '#CC6633', - '#CC9900', - '#CC9933', - '#CCCC00', - '#CCCC33', - '#FF0000', - '#FF0033', - '#FF0066', - '#FF0099', - '#FF00CC', - '#FF00FF', - '#FF3300', - '#FF3333', - '#FF3366', - '#FF3399', - '#FF33CC', - '#FF33FF', - '#FF6600', - '#FF6633', - '#FF9900', - '#FF9933', - '#FFCC00', - '#FFCC33' -]; - -/** - * Currently only WebKit-based Web Inspectors, Firefox >= v31, - * and the Firebug extension (any Firefox version) are known - * to support "%c" CSS customizations. - * - * TODO: add a `localStorage` variable to explicitly enable/disable colors - */ - -// eslint-disable-next-line complexity -function useColors() { - // NB: In an Electron preload script, document will be defined but not fully - // initialized. Since we know we're in Chrome, we'll just detect this case - // explicitly - if (typeof window !== 'undefined' && window.process && (window.process.type === 'renderer' || window.process.__nwjs)) { - return true; - } - - // Internet Explorer and Edge do not support colors. - if (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/)) { - return false; - } - - // Is webkit? http://stackoverflow.com/a/16459606/376773 - // document is undefined in react-native: https://github.com/facebook/react-native/pull/1632 - return (typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance) || - // Is firebug? http://stackoverflow.com/a/398120/376773 - (typeof window !== 'undefined' && window.console && (window.console.firebug || (window.console.exception && window.console.table))) || - // Is firefox >= v31? - // https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages - (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/) && parseInt(RegExp.$1, 10) >= 31) || - // Double check webkit in userAgent just in case we are in a worker - (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/)); -} - -/** - * Colorize log arguments if enabled. - * - * @api public - */ - -function formatArgs(args) { - args[0] = (this.useColors ? '%c' : '') + - this.namespace + - (this.useColors ? ' %c' : ' ') + - args[0] + - (this.useColors ? 
'%c ' : ' ') + - '+' + module.exports.humanize(this.diff); - - if (!this.useColors) { - return; - } - - const c = 'color: ' + this.color; - args.splice(1, 0, c, 'color: inherit'); - - // The final "%c" is somewhat tricky, because there could be other - // arguments passed either before or after the %c, so we need to - // figure out the correct index to insert the CSS into - let index = 0; - let lastC = 0; - args[0].replace(/%[a-zA-Z%]/g, match => { - if (match === '%%') { - return; - } - index++; - if (match === '%c') { - // We only are interested in the *last* %c - // (the user may have provided their own) - lastC = index; - } - }); - - args.splice(lastC, 0, c); -} - -/** - * Invokes `console.debug()` when available. - * No-op when `console.debug` is not a "function". - * If `console.debug` is not available, falls back - * to `console.log`. - * - * @api public - */ -exports.log = console.debug || console.log || (() => {}); - -/** - * Save `namespaces`. - * - * @param {String} namespaces - * @api private - */ -function save(namespaces) { - try { - if (namespaces) { - exports.storage.setItem('debug', namespaces); - } else { - exports.storage.removeItem('debug'); - } - } catch (error) { - // Swallow - // XXX (@Qix-) should we be logging these? - } -} - -/** - * Load `namespaces`. - * - * @return {String} returns the previously persisted debug modes - * @api private - */ -function load() { - let r; - try { - r = exports.storage.getItem('debug'); - } catch (error) { - // Swallow - // XXX (@Qix-) should we be logging these? - } - - // If debug isn't set in LS, and we're in Electron, try to load $DEBUG - if (!r && typeof process !== 'undefined' && 'env' in process) { - r = process.env.DEBUG; - } - - return r; -} - -/** - * Localstorage attempts to return the localstorage. - * - * This is necessary because safari throws - * when a user disables cookies/localstorage - * and you attempt to access it. - * - * @return {LocalStorage} - * @api private - */ - -function localstorage() { - try { - // TVMLKit (Apple TV JS Runtime) does not have a window object, just localStorage in the global context - // The Browser also has localStorage in the global context. - return localStorage; - } catch (error) { - // Swallow - // XXX (@Qix-) should we be logging these? - } -} - -module.exports = __nccwpck_require__(46243)(exports); - -const {formatters} = module.exports; - -/** - * Map %j to `JSON.stringify()`, since no Web Inspectors do that by default. - */ - -formatters.j = function (v) { - try { - return JSON.stringify(v); - } catch (error) { - return '[UnexpectedJSONParseError]: ' + error.message; - } -}; - - -/***/ }), - -/***/ 46243: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - - -/** - * This is the common logic for both the Node.js and web browser - * implementations of `debug()`. - */ - -function setup(env) { - createDebug.debug = createDebug; - createDebug.default = createDebug; - createDebug.coerce = coerce; - createDebug.disable = disable; - createDebug.enable = enable; - createDebug.enabled = enabled; - createDebug.humanize = __nccwpck_require__(80900); - createDebug.destroy = destroy; - - Object.keys(env).forEach(key => { - createDebug[key] = env[key]; - }); - - /** - * The currently active debug mode names, and names to skip. - */ - - createDebug.names = []; - createDebug.skips = []; - - /** - * Map of special "%n" handling functions, for the debug "format" argument. - * - * Valid key names are a single, lower or upper-case letter, i.e. "n" and "N". 
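The formatters map described above is how the `%j`, `%o` and `%O` handlers later in this file are registered; a sketch of adding a custom one (assuming the standalone `debug` package, bundled here as module 38237):

    const createDebug = require('debug');
    createDebug.formatters.h = (v) => v.toString('hex');    // teach debug a "%h" placeholder
    const log = createDebug('app:wire');
    log('sent %h', Buffer.from([0xde, 0xad, 0xbe, 0xef]));  // prints "... sent deadbeef" when DEBUG=app:* is set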
- */ - createDebug.formatters = {}; - - /** - * Selects a color for a debug namespace - * @param {String} namespace The namespace string for the debug instance to be colored - * @return {Number|String} An ANSI color code for the given namespace - * @api private - */ - function selectColor(namespace) { - let hash = 0; - - for (let i = 0; i < namespace.length; i++) { - hash = ((hash << 5) - hash) + namespace.charCodeAt(i); - hash |= 0; // Convert to 32bit integer - } - - return createDebug.colors[Math.abs(hash) % createDebug.colors.length]; - } - createDebug.selectColor = selectColor; - - /** - * Create a debugger with the given `namespace`. - * - * @param {String} namespace - * @return {Function} - * @api public - */ - function createDebug(namespace) { - let prevTime; - let enableOverride = null; - let namespacesCache; - let enabledCache; - - function debug(...args) { - // Disabled? - if (!debug.enabled) { - return; - } - - const self = debug; - - // Set `diff` timestamp - const curr = Number(new Date()); - const ms = curr - (prevTime || curr); - self.diff = ms; - self.prev = prevTime; - self.curr = curr; - prevTime = curr; - - args[0] = createDebug.coerce(args[0]); - - if (typeof args[0] !== 'string') { - // Anything else let's inspect with %O - args.unshift('%O'); - } - - // Apply any `formatters` transformations - let index = 0; - args[0] = args[0].replace(/%([a-zA-Z%])/g, (match, format) => { - // If we encounter an escaped % then don't increase the array index - if (match === '%%') { - return '%'; - } - index++; - const formatter = createDebug.formatters[format]; - if (typeof formatter === 'function') { - const val = args[index]; - match = formatter.call(self, val); - - // Now we need to remove `args[index]` since it's inlined in the `format` - args.splice(index, 1); - index--; - } - return match; - }); - - // Apply env-specific formatting (colors, etc.) - createDebug.formatArgs.call(self, args); - - const logFn = self.log || createDebug.log; - logFn.apply(self, args); - } - - debug.namespace = namespace; - debug.useColors = createDebug.useColors(); - debug.color = createDebug.selectColor(namespace); - debug.extend = extend; - debug.destroy = createDebug.destroy; // XXX Temporary. Will be removed in the next major release. - - Object.defineProperty(debug, 'enabled', { - enumerable: true, - configurable: false, - get: () => { - if (enableOverride !== null) { - return enableOverride; - } - if (namespacesCache !== createDebug.namespaces) { - namespacesCache = createDebug.namespaces; - enabledCache = createDebug.enabled(namespace); - } - - return enabledCache; - }, - set: v => { - enableOverride = v; - } - }); - - // Env-specific initialization logic for debug instances - if (typeof createDebug.init === 'function') { - createDebug.init(debug); - } - - return debug; - } - - function extend(namespace, delimiter) { - const newDebug = createDebug(this.namespace + (typeof delimiter === 'undefined' ? ':' : delimiter) + namespace); - newDebug.log = this.log; - return newDebug; - } - - /** - * Enables a debug mode by namespaces. This can include modes - * separated by a colon and wildcards. - * - * @param {String} namespaces - * @api public - */ - function enable(namespaces) { - createDebug.save(namespaces); - createDebug.namespaces = namespaces; - - createDebug.names = []; - createDebug.skips = []; - - let i; - const split = (typeof namespaces === 'string' ? 
namespaces : '').split(/[\s,]+/); - const len = split.length; - - for (i = 0; i < len; i++) { - if (!split[i]) { - // ignore empty strings - continue; - } - - namespaces = split[i].replace(/\*/g, '.*?'); - - if (namespaces[0] === '-') { - createDebug.skips.push(new RegExp('^' + namespaces.slice(1) + '$')); - } else { - createDebug.names.push(new RegExp('^' + namespaces + '$')); - } - } - } - - /** - * Disable debug output. - * - * @return {String} namespaces - * @api public - */ - function disable() { - const namespaces = [ - ...createDebug.names.map(toNamespace), - ...createDebug.skips.map(toNamespace).map(namespace => '-' + namespace) - ].join(','); - createDebug.enable(''); - return namespaces; - } - - /** - * Returns true if the given mode name is enabled, false otherwise. - * - * @param {String} name - * @return {Boolean} - * @api public - */ - function enabled(name) { - if (name[name.length - 1] === '*') { - return true; - } - - let i; - let len; - - for (i = 0, len = createDebug.skips.length; i < len; i++) { - if (createDebug.skips[i].test(name)) { - return false; - } - } - - for (i = 0, len = createDebug.names.length; i < len; i++) { - if (createDebug.names[i].test(name)) { - return true; - } - } - - return false; - } - - /** - * Convert regexp to namespace - * - * @param {RegExp} regxep - * @return {String} namespace - * @api private - */ - function toNamespace(regexp) { - return regexp.toString() - .substring(2, regexp.toString().length - 2) - .replace(/\.\*\?$/, '*'); - } - - /** - * Coerce `val`. - * - * @param {Mixed} val - * @return {Mixed} - * @api private - */ - function coerce(val) { - if (val instanceof Error) { - return val.stack || val.message; - } - return val; - } - - /** - * XXX DO NOT USE. This is a temporary stub function. - * XXX It WILL be removed in the next major release. - */ - function destroy() { - console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'); - } - - createDebug.enable(createDebug.load()); - - return createDebug; -} - -module.exports = setup; - - -/***/ }), - -/***/ 38237: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -/** - * Detect Electron renderer / nwjs process, which is node, but we should - * treat as a browser. - */ - -if (typeof process === 'undefined' || process.type === 'renderer' || process.browser === true || process.__nwjs) { - module.exports = __nccwpck_require__(28222); -} else { - module.exports = __nccwpck_require__(35332); -} - - -/***/ }), - -/***/ 35332: -/***/ ((module, exports, __nccwpck_require__) => { - -/** - * Module dependencies. - */ - -const tty = __nccwpck_require__(76224); -const util = __nccwpck_require__(73837); - -/** - * This is the Node.js implementation of `debug()`. - */ - -exports.init = init; -exports.log = log; -exports.formatArgs = formatArgs; -exports.save = save; -exports.load = load; -exports.useColors = useColors; -exports.destroy = util.deprecate( - () => {}, - 'Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.' -); - -/** - * Colors. 
- */ - -exports.colors = [6, 2, 3, 4, 5, 1]; - -try { - // Optional dependency (as in, doesn't need to be installed, NOT like optionalDependencies in package.json) - // eslint-disable-next-line import/no-extraneous-dependencies - const supportsColor = __nccwpck_require__(59318); - - if (supportsColor && (supportsColor.stderr || supportsColor).level >= 2) { - exports.colors = [ - 20, - 21, - 26, - 27, - 32, - 33, - 38, - 39, - 40, - 41, - 42, - 43, - 44, - 45, - 56, - 57, - 62, - 63, - 68, - 69, - 74, - 75, - 76, - 77, - 78, - 79, - 80, - 81, - 92, - 93, - 98, - 99, - 112, - 113, - 128, - 129, - 134, - 135, - 148, - 149, - 160, - 161, - 162, - 163, - 164, - 165, - 166, - 167, - 168, - 169, - 170, - 171, - 172, - 173, - 178, - 179, - 184, - 185, - 196, - 197, - 198, - 199, - 200, - 201, - 202, - 203, - 204, - 205, - 206, - 207, - 208, - 209, - 214, - 215, - 220, - 221 - ]; - } -} catch (error) { - // Swallow - we only care if `supports-color` is available; it doesn't have to be. -} - -/** - * Build up the default `inspectOpts` object from the environment variables. - * - * $ DEBUG_COLORS=no DEBUG_DEPTH=10 DEBUG_SHOW_HIDDEN=enabled node script.js - */ - -exports.inspectOpts = Object.keys(process.env).filter(key => { - return /^debug_/i.test(key); -}).reduce((obj, key) => { - // Camel-case - const prop = key - .substring(6) - .toLowerCase() - .replace(/_([a-z])/g, (_, k) => { - return k.toUpperCase(); - }); - - // Coerce string value into JS value - let val = process.env[key]; - if (/^(yes|on|true|enabled)$/i.test(val)) { - val = true; - } else if (/^(no|off|false|disabled)$/i.test(val)) { - val = false; - } else if (val === 'null') { - val = null; - } else { - val = Number(val); - } - - obj[prop] = val; - return obj; -}, {}); - -/** - * Is stdout a TTY? Colored output is enabled when `true`. - */ - -function useColors() { - return 'colors' in exports.inspectOpts ? - Boolean(exports.inspectOpts.colors) : - tty.isatty(process.stderr.fd); -} - -/** - * Adds ANSI color escape codes if enabled. - * - * @api public - */ - -function formatArgs(args) { - const {namespace: name, useColors} = this; - - if (useColors) { - const c = this.color; - const colorCode = '\u001B[3' + (c < 8 ? c : '8;5;' + c); - const prefix = ` ${colorCode};1m${name} \u001B[0m`; - - args[0] = prefix + args[0].split('\n').join('\n' + prefix); - args.push(colorCode + 'm+' + module.exports.humanize(this.diff) + '\u001B[0m'); - } else { - args[0] = getDate() + name + ' ' + args[0]; - } -} - -function getDate() { - if (exports.inspectOpts.hideDate) { - return ''; - } - return new Date().toISOString() + ' '; -} - -/** - * Invokes `util.format()` with the specified arguments and writes to stderr. - */ - -function log(...args) { - return process.stderr.write(util.format(...args) + '\n'); -} - -/** - * Save `namespaces`. - * - * @param {String} namespaces - * @api private - */ -function save(namespaces) { - if (namespaces) { - process.env.DEBUG = namespaces; - } else { - // If you set a process.env field to null or undefined, it gets cast to the - // string 'null' or 'undefined'. Just delete instead. - delete process.env.DEBUG; - } -} - -/** - * Load `namespaces`. - * - * @return {String} returns the previously persisted debug modes - * @api private - */ - -function load() { - return process.env.DEBUG; -} - -/** - * Init logic for `debug` instances. - * - * Create a new `inspectOpts` object in case `useColors` is set - * differently for a particular `debug` instance. 
- */ - -function init(debug) { - debug.inspectOpts = {}; - - const keys = Object.keys(exports.inspectOpts); - for (let i = 0; i < keys.length; i++) { - debug.inspectOpts[keys[i]] = exports.inspectOpts[keys[i]]; - } -} - -module.exports = __nccwpck_require__(46243)(exports); - -const {formatters} = module.exports; - -/** - * Map %o to `util.inspect()`, all on a single line. - */ - -formatters.o = function (v) { - this.inspectOpts.colors = this.useColors; - return util.inspect(v, this.inspectOpts) - .split('\n') - .map(str => str.trim()) - .join(' '); -}; - -/** - * Map %O to `util.inspect()`, allowing multiple lines if needed. - */ - -formatters.O = function (v) { - this.inspectOpts.colors = this.useColors; - return util.inspect(v, this.inspectOpts); -}; - - -/***/ }), - -/***/ 31621: -/***/ ((module) => { - -"use strict"; - - -module.exports = (flag, argv = process.argv) => { - const prefix = flag.startsWith('-') ? '' : (flag.length === 1 ? '-' : '--'); - const position = argv.indexOf(prefix + flag); - const terminatorPosition = argv.indexOf('--'); - return position !== -1 && (terminatorPosition === -1 || position < terminatorPosition); -}; - - -/***/ }), - -/***/ 15098: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const net_1 = __importDefault(__nccwpck_require__(41808)); -const tls_1 = __importDefault(__nccwpck_require__(24404)); -const url_1 = __importDefault(__nccwpck_require__(57310)); -const assert_1 = __importDefault(__nccwpck_require__(39491)); -const debug_1 = __importDefault(__nccwpck_require__(38237)); -const agent_base_1 = __nccwpck_require__(49690); -const parse_proxy_response_1 = __importDefault(__nccwpck_require__(595)); -const debug = debug_1.default('https-proxy-agent:agent'); -/** - * The `HttpsProxyAgent` implements an HTTP Agent subclass that connects to - * the specified "HTTP(s) proxy server" in order to proxy HTTPS requests. - * - * Outgoing HTTP requests are first tunneled through the proxy server using the - * `CONNECT` HTTP request method to establish a connection to the proxy server, - * and then the proxy server connects to the destination target and issues the - * HTTP request from the proxy server. - * - * `https:` requests have their socket connection upgraded to TLS once - * the connection to the proxy server has been established. 
- * - * @api public - */ -class HttpsProxyAgent extends agent_base_1.Agent { - constructor(_opts) { - let opts; - if (typeof _opts === 'string') { - opts = url_1.default.parse(_opts); - } - else { - opts = _opts; - } - if (!opts) { - throw new Error('an HTTP(S) proxy server `host` and `port` must be specified!'); - } - debug('creating new HttpsProxyAgent instance: %o', opts); - super(opts); - const proxy = Object.assign({}, opts); - // If `true`, then connect to the proxy server over TLS. - // Defaults to `false`. - this.secureProxy = opts.secureProxy || isHTTPS(proxy.protocol); - // Prefer `hostname` over `host`, and set the `port` if needed. - proxy.host = proxy.hostname || proxy.host; - if (typeof proxy.port === 'string') { - proxy.port = parseInt(proxy.port, 10); - } - if (!proxy.port && proxy.host) { - proxy.port = this.secureProxy ? 443 : 80; - } - // ALPN is supported by Node.js >= v5. - // attempt to negotiate http/1.1 for proxy servers that support http/2 - if (this.secureProxy && !('ALPNProtocols' in proxy)) { - proxy.ALPNProtocols = ['http 1.1']; - } - if (proxy.host && proxy.path) { - // If both a `host` and `path` are specified then it's most likely - // the result of a `url.parse()` call... we need to remove the - // `path` portion so that `net.connect()` doesn't attempt to open - // that as a Unix socket file. - delete proxy.path; - delete proxy.pathname; - } - this.proxy = proxy; - } - /** - * Called when the node-core HTTP client library is creating a - * new HTTP request. - * - * @api protected - */ - callback(req, opts) { - return __awaiter(this, void 0, void 0, function* () { - const { proxy, secureProxy } = this; - // Create a socket connection to the proxy server. - let socket; - if (secureProxy) { - debug('Creating `tls.Socket`: %o', proxy); - socket = tls_1.default.connect(proxy); - } - else { - debug('Creating `net.Socket`: %o', proxy); - socket = net_1.default.connect(proxy); - } - const headers = Object.assign({}, proxy.headers); - const hostname = `${opts.host}:${opts.port}`; - let payload = `CONNECT ${hostname} HTTP/1.1\r\n`; - // Inject the `Proxy-Authorization` header if necessary. - if (proxy.auth) { - headers['Proxy-Authorization'] = `Basic ${Buffer.from(proxy.auth).toString('base64')}`; - } - // The `Host` header should only include the port - // number when it is not the default port. - let { host, port, secureEndpoint } = opts; - if (!isDefaultPort(port, secureEndpoint)) { - host += `:${port}`; - } - headers.Host = host; - headers.Connection = 'close'; - for (const name of Object.keys(headers)) { - payload += `${name}: ${headers[name]}\r\n`; - } - const proxyResponsePromise = parse_proxy_response_1.default(socket); - socket.write(`${payload}\r\n`); - const { statusCode, buffered } = yield proxyResponsePromise; - if (statusCode === 200) { - req.once('socket', resume); - if (opts.secureEndpoint) { - // The proxy is connecting to a TLS server, so upgrade - // this socket connection to a TLS connection. - debug('Upgrading socket connection to TLS'); - const servername = opts.servername || opts.host; - return tls_1.default.connect(Object.assign(Object.assign({}, omit(opts, 'host', 'hostname', 'path', 'port')), { socket, - servername })); - } - return socket; - } - // Some other status code that's not 200... need to re-play the HTTP - // header "data" events onto the socket once the HTTP machinery is - // attached so that the node core `http` can parse and handle the - // error status code. 
- // Close the original socket, and a new "fake" socket is returned - // instead, so that the proxy doesn't get the HTTP request - // written to it (which may contain `Authorization` headers or other - // sensitive data). - // - // See: https://hackerone.com/reports/541502 - socket.destroy(); - const fakeSocket = new net_1.default.Socket({ writable: false }); - fakeSocket.readable = true; - // Need to wait for the "socket" event to re-play the "data" events. - req.once('socket', (s) => { - debug('replaying proxy buffer for failed request'); - assert_1.default(s.listenerCount('data') > 0); - // Replay the "buffered" Buffer onto the fake `socket`, since at - // this point the HTTP module machinery has been hooked up for - // the user. - s.push(buffered); - s.push(null); - }); - return fakeSocket; - }); - } -} -exports["default"] = HttpsProxyAgent; -function resume(socket) { - socket.resume(); -} -function isDefaultPort(port, secure) { - return Boolean((!secure && port === 80) || (secure && port === 443)); -} -function isHTTPS(protocol) { - return typeof protocol === 'string' ? /^https:?$/i.test(protocol) : false; -} -function omit(obj, ...keys) { - const ret = {}; - let key; - for (key in obj) { - if (!keys.includes(key)) { - ret[key] = obj[key]; - } - } - return ret; -} -//# sourceMappingURL=agent.js.map - -/***/ }), - -/***/ 77219: -/***/ (function(module, __unused_webpack_exports, __nccwpck_require__) { - -"use strict"; - -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -const agent_1 = __importDefault(__nccwpck_require__(15098)); -function createHttpsProxyAgent(opts) { - return new agent_1.default(opts); -} -(function (createHttpsProxyAgent) { - createHttpsProxyAgent.HttpsProxyAgent = agent_1.default; - createHttpsProxyAgent.prototype = agent_1.default.prototype; -})(createHttpsProxyAgent || (createHttpsProxyAgent = {})); -module.exports = createHttpsProxyAgent; -//# sourceMappingURL=index.js.map - -/***/ }), - -/***/ 595: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const debug_1 = __importDefault(__nccwpck_require__(38237)); -const debug = debug_1.default('https-proxy-agent:parse-proxy-response'); -function parseProxyResponse(socket) { - return new Promise((resolve, reject) => { - // we need to buffer any HTTP traffic that happens with the proxy before we get - // the CONNECT response, so that if the response is anything other than an "200" - // response code, then we can re-play the "data" events on the socket once the - // HTTP parser is hooked up... 
- let buffersLength = 0; - const buffers = []; - function read() { - const b = socket.read(); - if (b) - ondata(b); - else - socket.once('readable', read); - } - function cleanup() { - socket.removeListener('end', onend); - socket.removeListener('error', onerror); - socket.removeListener('close', onclose); - socket.removeListener('readable', read); - } - function onclose(err) { - debug('onclose had error %o', err); - } - function onend() { - debug('onend'); - } - function onerror(err) { - cleanup(); - debug('onerror %o', err); - reject(err); - } - function ondata(b) { - buffers.push(b); - buffersLength += b.length; - const buffered = Buffer.concat(buffers, buffersLength); - const endOfHeaders = buffered.indexOf('\r\n\r\n'); - if (endOfHeaders === -1) { - // keep buffering - debug('have not received end of HTTP headers yet...'); - read(); - return; - } - const firstLine = buffered.toString('ascii', 0, buffered.indexOf('\r\n')); - const statusCode = +firstLine.split(' ')[1]; - debug('got proxy server response: %o', firstLine); - resolve({ - statusCode, - buffered - }); - } - socket.on('error', onerror); - socket.on('close', onclose); - socket.on('end', onend); - read(); - }); -} -exports["default"] = parseProxyResponse; -//# sourceMappingURL=parse-proxy-response.js.map - -/***/ }), - -/***/ 87783: -/***/ ((__unused_webpack_module, exports) => { - -(function(exports) { - "use strict"; - - function isArray(obj) { - if (obj !== null) { - return Object.prototype.toString.call(obj) === "[object Array]"; - } else { - return false; - } - } - - function isObject(obj) { - if (obj !== null) { - return Object.prototype.toString.call(obj) === "[object Object]"; - } else { - return false; - } - } - - function strictDeepEqual(first, second) { - // Check the scalar case first. - if (first === second) { - return true; - } - - // Check if they are the same type. - var firstType = Object.prototype.toString.call(first); - if (firstType !== Object.prototype.toString.call(second)) { - return false; - } - // We know that first and second have the same type so we can just check the - // first type from now on. - if (isArray(first) === true) { - // Short circuit if they're not the same length; - if (first.length !== second.length) { - return false; - } - for (var i = 0; i < first.length; i++) { - if (strictDeepEqual(first[i], second[i]) === false) { - return false; - } - } - return true; - } - if (isObject(first) === true) { - // An object is equal if it has the same key/value pairs. - var keysSeen = {}; - for (var key in first) { - if (hasOwnProperty.call(first, key)) { - if (strictDeepEqual(first[key], second[key]) === false) { - return false; - } - keysSeen[key] = true; - } - } - // Now check that there aren't any keys in second that weren't - // in first. - for (var key2 in second) { - if (hasOwnProperty.call(second, key2)) { - if (keysSeen[key2] !== true) { - return false; - } - } - } - return true; - } - return false; - } - - function isFalse(obj) { - // From the spec: - // A false value corresponds to the following values: - // Empty list - // Empty object - // Empty string - // False boolean - // null value - - // First check the scalar values. - if (obj === "" || obj === false || obj === null) { - return true; - } else if (isArray(obj) && obj.length === 0) { - // Check for an empty array. - return true; - } else if (isObject(obj)) { - // Check for an empty object. - for (var key in obj) { - // If there are any keys, then - // the object is not empty so the object - // is not false. 
- if (obj.hasOwnProperty(key)) { - return false; - } - } - return true; - } else { - return false; - } - } - - function objValues(obj) { - var keys = Object.keys(obj); - var values = []; - for (var i = 0; i < keys.length; i++) { - values.push(obj[keys[i]]); - } - return values; - } - - function merge(a, b) { - var merged = {}; - for (var key in a) { - merged[key] = a[key]; - } - for (var key2 in b) { - merged[key2] = b[key2]; - } - return merged; - } - - var trimLeft; - if (typeof String.prototype.trimLeft === "function") { - trimLeft = function(str) { - return str.trimLeft(); - }; - } else { - trimLeft = function(str) { - return str.match(/^\s*(.*)/)[1]; - }; - } - - // Type constants used to define functions. - var TYPE_NUMBER = 0; - var TYPE_ANY = 1; - var TYPE_STRING = 2; - var TYPE_ARRAY = 3; - var TYPE_OBJECT = 4; - var TYPE_BOOLEAN = 5; - var TYPE_EXPREF = 6; - var TYPE_NULL = 7; - var TYPE_ARRAY_NUMBER = 8; - var TYPE_ARRAY_STRING = 9; - var TYPE_NAME_TABLE = { - 0: 'number', - 1: 'any', - 2: 'string', - 3: 'array', - 4: 'object', - 5: 'boolean', - 6: 'expression', - 7: 'null', - 8: 'Array', - 9: 'Array' - }; - - var TOK_EOF = "EOF"; - var TOK_UNQUOTEDIDENTIFIER = "UnquotedIdentifier"; - var TOK_QUOTEDIDENTIFIER = "QuotedIdentifier"; - var TOK_RBRACKET = "Rbracket"; - var TOK_RPAREN = "Rparen"; - var TOK_COMMA = "Comma"; - var TOK_COLON = "Colon"; - var TOK_RBRACE = "Rbrace"; - var TOK_NUMBER = "Number"; - var TOK_CURRENT = "Current"; - var TOK_EXPREF = "Expref"; - var TOK_PIPE = "Pipe"; - var TOK_OR = "Or"; - var TOK_AND = "And"; - var TOK_EQ = "EQ"; - var TOK_GT = "GT"; - var TOK_LT = "LT"; - var TOK_GTE = "GTE"; - var TOK_LTE = "LTE"; - var TOK_NE = "NE"; - var TOK_FLATTEN = "Flatten"; - var TOK_STAR = "Star"; - var TOK_FILTER = "Filter"; - var TOK_DOT = "Dot"; - var TOK_NOT = "Not"; - var TOK_LBRACE = "Lbrace"; - var TOK_LBRACKET = "Lbracket"; - var TOK_LPAREN= "Lparen"; - var TOK_LITERAL= "Literal"; - - // The "&", "[", "<", ">" tokens - // are not in basicToken because - // there are two token variants - // ("&&", "[?", "<=", ">="). This is specially handled - // below. 
- - var basicTokens = { - ".": TOK_DOT, - "*": TOK_STAR, - ",": TOK_COMMA, - ":": TOK_COLON, - "{": TOK_LBRACE, - "}": TOK_RBRACE, - "]": TOK_RBRACKET, - "(": TOK_LPAREN, - ")": TOK_RPAREN, - "@": TOK_CURRENT - }; - - var operatorStartToken = { - "<": true, - ">": true, - "=": true, - "!": true - }; - - var skipChars = { - " ": true, - "\t": true, - "\n": true - }; - - - function isAlpha(ch) { - return (ch >= "a" && ch <= "z") || - (ch >= "A" && ch <= "Z") || - ch === "_"; - } - - function isNum(ch) { - return (ch >= "0" && ch <= "9") || - ch === "-"; - } - function isAlphaNum(ch) { - return (ch >= "a" && ch <= "z") || - (ch >= "A" && ch <= "Z") || - (ch >= "0" && ch <= "9") || - ch === "_"; - } - - function Lexer() { - } - Lexer.prototype = { - tokenize: function(stream) { - var tokens = []; - this._current = 0; - var start; - var identifier; - var token; - while (this._current < stream.length) { - if (isAlpha(stream[this._current])) { - start = this._current; - identifier = this._consumeUnquotedIdentifier(stream); - tokens.push({type: TOK_UNQUOTEDIDENTIFIER, - value: identifier, - start: start}); - } else if (basicTokens[stream[this._current]] !== undefined) { - tokens.push({type: basicTokens[stream[this._current]], - value: stream[this._current], - start: this._current}); - this._current++; - } else if (isNum(stream[this._current])) { - token = this._consumeNumber(stream); - tokens.push(token); - } else if (stream[this._current] === "[") { - // No need to increment this._current. This happens - // in _consumeLBracket - token = this._consumeLBracket(stream); - tokens.push(token); - } else if (stream[this._current] === "\"") { - start = this._current; - identifier = this._consumeQuotedIdentifier(stream); - tokens.push({type: TOK_QUOTEDIDENTIFIER, - value: identifier, - start: start}); - } else if (stream[this._current] === "'") { - start = this._current; - identifier = this._consumeRawStringLiteral(stream); - tokens.push({type: TOK_LITERAL, - value: identifier, - start: start}); - } else if (stream[this._current] === "`") { - start = this._current; - var literal = this._consumeLiteral(stream); - tokens.push({type: TOK_LITERAL, - value: literal, - start: start}); - } else if (operatorStartToken[stream[this._current]] !== undefined) { - tokens.push(this._consumeOperator(stream)); - } else if (skipChars[stream[this._current]] !== undefined) { - // Ignore whitespace. 
- this._current++; - } else if (stream[this._current] === "&") { - start = this._current; - this._current++; - if (stream[this._current] === "&") { - this._current++; - tokens.push({type: TOK_AND, value: "&&", start: start}); - } else { - tokens.push({type: TOK_EXPREF, value: "&", start: start}); - } - } else if (stream[this._current] === "|") { - start = this._current; - this._current++; - if (stream[this._current] === "|") { - this._current++; - tokens.push({type: TOK_OR, value: "||", start: start}); - } else { - tokens.push({type: TOK_PIPE, value: "|", start: start}); - } - } else { - var error = new Error("Unknown character:" + stream[this._current]); - error.name = "LexerError"; - throw error; - } - } - return tokens; - }, - - _consumeUnquotedIdentifier: function(stream) { - var start = this._current; - this._current++; - while (this._current < stream.length && isAlphaNum(stream[this._current])) { - this._current++; - } - return stream.slice(start, this._current); - }, - - _consumeQuotedIdentifier: function(stream) { - var start = this._current; - this._current++; - var maxLength = stream.length; - while (stream[this._current] !== "\"" && this._current < maxLength) { - // You can escape a double quote and you can escape an escape. - var current = this._current; - if (stream[current] === "\\" && (stream[current + 1] === "\\" || - stream[current + 1] === "\"")) { - current += 2; - } else { - current++; - } - this._current = current; - } - this._current++; - return JSON.parse(stream.slice(start, this._current)); - }, - - _consumeRawStringLiteral: function(stream) { - var start = this._current; - this._current++; - var maxLength = stream.length; - while (stream[this._current] !== "'" && this._current < maxLength) { - // You can escape a single quote and you can escape an escape. 
- var current = this._current; - if (stream[current] === "\\" && (stream[current + 1] === "\\" || - stream[current + 1] === "'")) { - current += 2; - } else { - current++; - } - this._current = current; - } - this._current++; - var literal = stream.slice(start + 1, this._current - 1); - return literal.replace("\\'", "'"); - }, - - _consumeNumber: function(stream) { - var start = this._current; - this._current++; - var maxLength = stream.length; - while (isNum(stream[this._current]) && this._current < maxLength) { - this._current++; - } - var value = parseInt(stream.slice(start, this._current)); - return {type: TOK_NUMBER, value: value, start: start}; - }, - - _consumeLBracket: function(stream) { - var start = this._current; - this._current++; - if (stream[this._current] === "?") { - this._current++; - return {type: TOK_FILTER, value: "[?", start: start}; - } else if (stream[this._current] === "]") { - this._current++; - return {type: TOK_FLATTEN, value: "[]", start: start}; - } else { - return {type: TOK_LBRACKET, value: "[", start: start}; - } - }, - - _consumeOperator: function(stream) { - var start = this._current; - var startingChar = stream[start]; - this._current++; - if (startingChar === "!") { - if (stream[this._current] === "=") { - this._current++; - return {type: TOK_NE, value: "!=", start: start}; - } else { - return {type: TOK_NOT, value: "!", start: start}; - } - } else if (startingChar === "<") { - if (stream[this._current] === "=") { - this._current++; - return {type: TOK_LTE, value: "<=", start: start}; - } else { - return {type: TOK_LT, value: "<", start: start}; - } - } else if (startingChar === ">") { - if (stream[this._current] === "=") { - this._current++; - return {type: TOK_GTE, value: ">=", start: start}; - } else { - return {type: TOK_GT, value: ">", start: start}; - } - } else if (startingChar === "=") { - if (stream[this._current] === "=") { - this._current++; - return {type: TOK_EQ, value: "==", start: start}; - } - } - }, - - _consumeLiteral: function(stream) { - this._current++; - var start = this._current; - var maxLength = stream.length; - var literal; - while(stream[this._current] !== "`" && this._current < maxLength) { - // You can escape a literal char or you can escape the escape. - var current = this._current; - if (stream[current] === "\\" && (stream[current + 1] === "\\" || - stream[current + 1] === "`")) { - current += 2; - } else { - current++; - } - this._current = current; - } - var literalString = trimLeft(stream.slice(start, this._current)); - literalString = literalString.replace("\\`", "`"); - if (this._looksLikeJSON(literalString)) { - literal = JSON.parse(literalString); - } else { - // Try to JSON parse it as "" - literal = JSON.parse("\"" + literalString + "\""); - } - // +1 gets us to the ending "`", +1 to move on to the next char. 
- this._current++; - return literal; - }, - - _looksLikeJSON: function(literalString) { - var startingChars = "[{\""; - var jsonLiterals = ["true", "false", "null"]; - var numberLooking = "-0123456789"; - - if (literalString === "") { - return false; - } else if (startingChars.indexOf(literalString[0]) >= 0) { - return true; - } else if (jsonLiterals.indexOf(literalString) >= 0) { - return true; - } else if (numberLooking.indexOf(literalString[0]) >= 0) { - try { - JSON.parse(literalString); - return true; - } catch (ex) { - return false; - } - } else { - return false; - } - } - }; - - var bindingPower = {}; - bindingPower[TOK_EOF] = 0; - bindingPower[TOK_UNQUOTEDIDENTIFIER] = 0; - bindingPower[TOK_QUOTEDIDENTIFIER] = 0; - bindingPower[TOK_RBRACKET] = 0; - bindingPower[TOK_RPAREN] = 0; - bindingPower[TOK_COMMA] = 0; - bindingPower[TOK_RBRACE] = 0; - bindingPower[TOK_NUMBER] = 0; - bindingPower[TOK_CURRENT] = 0; - bindingPower[TOK_EXPREF] = 0; - bindingPower[TOK_PIPE] = 1; - bindingPower[TOK_OR] = 2; - bindingPower[TOK_AND] = 3; - bindingPower[TOK_EQ] = 5; - bindingPower[TOK_GT] = 5; - bindingPower[TOK_LT] = 5; - bindingPower[TOK_GTE] = 5; - bindingPower[TOK_LTE] = 5; - bindingPower[TOK_NE] = 5; - bindingPower[TOK_FLATTEN] = 9; - bindingPower[TOK_STAR] = 20; - bindingPower[TOK_FILTER] = 21; - bindingPower[TOK_DOT] = 40; - bindingPower[TOK_NOT] = 45; - bindingPower[TOK_LBRACE] = 50; - bindingPower[TOK_LBRACKET] = 55; - bindingPower[TOK_LPAREN] = 60; - - function Parser() { - } - - Parser.prototype = { - parse: function(expression) { - this._loadTokens(expression); - this.index = 0; - var ast = this.expression(0); - if (this._lookahead(0) !== TOK_EOF) { - var t = this._lookaheadToken(0); - var error = new Error( - "Unexpected token type: " + t.type + ", value: " + t.value); - error.name = "ParserError"; - throw error; - } - return ast; - }, - - _loadTokens: function(expression) { - var lexer = new Lexer(); - var tokens = lexer.tokenize(expression); - tokens.push({type: TOK_EOF, value: "", start: expression.length}); - this.tokens = tokens; - }, - - expression: function(rbp) { - var leftToken = this._lookaheadToken(0); - this._advance(); - var left = this.nud(leftToken); - var currentToken = this._lookahead(0); - while (rbp < bindingPower[currentToken]) { - this._advance(); - left = this.led(currentToken, left); - currentToken = this._lookahead(0); - } - return left; - }, - - _lookahead: function(number) { - return this.tokens[this.index + number].type; - }, - - _lookaheadToken: function(number) { - return this.tokens[this.index + number]; - }, - - _advance: function() { - this.index++; - }, - - nud: function(token) { - var left; - var right; - var expression; - switch (token.type) { - case TOK_LITERAL: - return {type: "Literal", value: token.value}; - case TOK_UNQUOTEDIDENTIFIER: - return {type: "Field", name: token.value}; - case TOK_QUOTEDIDENTIFIER: - var node = {type: "Field", name: token.value}; - if (this._lookahead(0) === TOK_LPAREN) { - throw new Error("Quoted identifier not allowed for function names."); - } - return node; - case TOK_NOT: - right = this.expression(bindingPower.Not); - return {type: "NotExpression", children: [right]}; - case TOK_STAR: - left = {type: "Identity"}; - right = null; - if (this._lookahead(0) === TOK_RBRACKET) { - // This can happen in a multiselect, - // [a, b, *] - right = {type: "Identity"}; - } else { - right = this._parseProjectionRHS(bindingPower.Star); - } - return {type: "ValueProjection", children: [left, right]}; - case TOK_FILTER: - return 
this.led(token.type, {type: "Identity"}); - case TOK_LBRACE: - return this._parseMultiselectHash(); - case TOK_FLATTEN: - left = {type: TOK_FLATTEN, children: [{type: "Identity"}]}; - right = this._parseProjectionRHS(bindingPower.Flatten); - return {type: "Projection", children: [left, right]}; - case TOK_LBRACKET: - if (this._lookahead(0) === TOK_NUMBER || this._lookahead(0) === TOK_COLON) { - right = this._parseIndexExpression(); - return this._projectIfSlice({type: "Identity"}, right); - } else if (this._lookahead(0) === TOK_STAR && - this._lookahead(1) === TOK_RBRACKET) { - this._advance(); - this._advance(); - right = this._parseProjectionRHS(bindingPower.Star); - return {type: "Projection", - children: [{type: "Identity"}, right]}; - } - return this._parseMultiselectList(); - case TOK_CURRENT: - return {type: TOK_CURRENT}; - case TOK_EXPREF: - expression = this.expression(bindingPower.Expref); - return {type: "ExpressionReference", children: [expression]}; - case TOK_LPAREN: - var args = []; - while (this._lookahead(0) !== TOK_RPAREN) { - if (this._lookahead(0) === TOK_CURRENT) { - expression = {type: TOK_CURRENT}; - this._advance(); - } else { - expression = this.expression(0); - } - args.push(expression); - } - this._match(TOK_RPAREN); - return args[0]; - default: - this._errorToken(token); - } - }, - - led: function(tokenName, left) { - var right; - switch(tokenName) { - case TOK_DOT: - var rbp = bindingPower.Dot; - if (this._lookahead(0) !== TOK_STAR) { - right = this._parseDotRHS(rbp); - return {type: "Subexpression", children: [left, right]}; - } - // Creating a projection. - this._advance(); - right = this._parseProjectionRHS(rbp); - return {type: "ValueProjection", children: [left, right]}; - case TOK_PIPE: - right = this.expression(bindingPower.Pipe); - return {type: TOK_PIPE, children: [left, right]}; - case TOK_OR: - right = this.expression(bindingPower.Or); - return {type: "OrExpression", children: [left, right]}; - case TOK_AND: - right = this.expression(bindingPower.And); - return {type: "AndExpression", children: [left, right]}; - case TOK_LPAREN: - var name = left.name; - var args = []; - var expression, node; - while (this._lookahead(0) !== TOK_RPAREN) { - if (this._lookahead(0) === TOK_CURRENT) { - expression = {type: TOK_CURRENT}; - this._advance(); - } else { - expression = this.expression(0); - } - if (this._lookahead(0) === TOK_COMMA) { - this._match(TOK_COMMA); - } - args.push(expression); - } - this._match(TOK_RPAREN); - node = {type: "Function", name: name, children: args}; - return node; - case TOK_FILTER: - var condition = this.expression(0); - this._match(TOK_RBRACKET); - if (this._lookahead(0) === TOK_FLATTEN) { - right = {type: "Identity"}; - } else { - right = this._parseProjectionRHS(bindingPower.Filter); - } - return {type: "FilterProjection", children: [left, right, condition]}; - case TOK_FLATTEN: - var leftNode = {type: TOK_FLATTEN, children: [left]}; - var rightNode = this._parseProjectionRHS(bindingPower.Flatten); - return {type: "Projection", children: [leftNode, rightNode]}; - case TOK_EQ: - case TOK_NE: - case TOK_GT: - case TOK_GTE: - case TOK_LT: - case TOK_LTE: - return this._parseComparator(left, tokenName); - case TOK_LBRACKET: - var token = this._lookaheadToken(0); - if (token.type === TOK_NUMBER || token.type === TOK_COLON) { - right = this._parseIndexExpression(); - return this._projectIfSlice(left, right); - } - this._match(TOK_STAR); - this._match(TOK_RBRACKET); - right = this._parseProjectionRHS(bindingPower.Star); - return {type: 
"Projection", children: [left, right]}; - default: - this._errorToken(this._lookaheadToken(0)); - } - }, - - _match: function(tokenType) { - if (this._lookahead(0) === tokenType) { - this._advance(); - } else { - var t = this._lookaheadToken(0); - var error = new Error("Expected " + tokenType + ", got: " + t.type); - error.name = "ParserError"; - throw error; - } - }, - - _errorToken: function(token) { - var error = new Error("Invalid token (" + - token.type + "): \"" + - token.value + "\""); - error.name = "ParserError"; - throw error; - }, - - - _parseIndexExpression: function() { - if (this._lookahead(0) === TOK_COLON || this._lookahead(1) === TOK_COLON) { - return this._parseSliceExpression(); - } else { - var node = { - type: "Index", - value: this._lookaheadToken(0).value}; - this._advance(); - this._match(TOK_RBRACKET); - return node; - } - }, - - _projectIfSlice: function(left, right) { - var indexExpr = {type: "IndexExpression", children: [left, right]}; - if (right.type === "Slice") { - return { - type: "Projection", - children: [indexExpr, this._parseProjectionRHS(bindingPower.Star)] - }; - } else { - return indexExpr; - } - }, - - _parseSliceExpression: function() { - // [start:end:step] where each part is optional, as well as the last - // colon. - var parts = [null, null, null]; - var index = 0; - var currentToken = this._lookahead(0); - while (currentToken !== TOK_RBRACKET && index < 3) { - if (currentToken === TOK_COLON) { - index++; - this._advance(); - } else if (currentToken === TOK_NUMBER) { - parts[index] = this._lookaheadToken(0).value; - this._advance(); - } else { - var t = this._lookahead(0); - var error = new Error("Syntax error, unexpected token: " + - t.value + "(" + t.type + ")"); - error.name = "Parsererror"; - throw error; - } - currentToken = this._lookahead(0); - } - this._match(TOK_RBRACKET); - return { - type: "Slice", - children: parts - }; - }, - - _parseComparator: function(left, comparator) { - var right = this.expression(bindingPower[comparator]); - return {type: "Comparator", name: comparator, children: [left, right]}; - }, - - _parseDotRHS: function(rbp) { - var lookahead = this._lookahead(0); - var exprTokens = [TOK_UNQUOTEDIDENTIFIER, TOK_QUOTEDIDENTIFIER, TOK_STAR]; - if (exprTokens.indexOf(lookahead) >= 0) { - return this.expression(rbp); - } else if (lookahead === TOK_LBRACKET) { - this._match(TOK_LBRACKET); - return this._parseMultiselectList(); - } else if (lookahead === TOK_LBRACE) { - this._match(TOK_LBRACE); - return this._parseMultiselectHash(); - } - }, - - _parseProjectionRHS: function(rbp) { - var right; - if (bindingPower[this._lookahead(0)] < 10) { - right = {type: "Identity"}; - } else if (this._lookahead(0) === TOK_LBRACKET) { - right = this.expression(rbp); - } else if (this._lookahead(0) === TOK_FILTER) { - right = this.expression(rbp); - } else if (this._lookahead(0) === TOK_DOT) { - this._match(TOK_DOT); - right = this._parseDotRHS(rbp); - } else { - var t = this._lookaheadToken(0); - var error = new Error("Sytanx error, unexpected token: " + - t.value + "(" + t.type + ")"); - error.name = "ParserError"; - throw error; - } - return right; - }, - - _parseMultiselectList: function() { - var expressions = []; - while (this._lookahead(0) !== TOK_RBRACKET) { - var expression = this.expression(0); - expressions.push(expression); - if (this._lookahead(0) === TOK_COMMA) { - this._match(TOK_COMMA); - if (this._lookahead(0) === TOK_RBRACKET) { - throw new Error("Unexpected token Rbracket"); - } - } - } - this._match(TOK_RBRACKET); - 
return {type: "MultiSelectList", children: expressions}; - }, - - _parseMultiselectHash: function() { - var pairs = []; - var identifierTypes = [TOK_UNQUOTEDIDENTIFIER, TOK_QUOTEDIDENTIFIER]; - var keyToken, keyName, value, node; - for (;;) { - keyToken = this._lookaheadToken(0); - if (identifierTypes.indexOf(keyToken.type) < 0) { - throw new Error("Expecting an identifier token, got: " + - keyToken.type); - } - keyName = keyToken.value; - this._advance(); - this._match(TOK_COLON); - value = this.expression(0); - node = {type: "KeyValuePair", name: keyName, value: value}; - pairs.push(node); - if (this._lookahead(0) === TOK_COMMA) { - this._match(TOK_COMMA); - } else if (this._lookahead(0) === TOK_RBRACE) { - this._match(TOK_RBRACE); - break; - } - } - return {type: "MultiSelectHash", children: pairs}; - } - }; - - - function TreeInterpreter(runtime) { - this.runtime = runtime; - } - - TreeInterpreter.prototype = { - search: function(node, value) { - return this.visit(node, value); - }, - - visit: function(node, value) { - var matched, current, result, first, second, field, left, right, collected, i; - switch (node.type) { - case "Field": - if (value !== null && isObject(value)) { - field = value[node.name]; - if (field === undefined) { - return null; - } else { - return field; - } - } - return null; - case "Subexpression": - result = this.visit(node.children[0], value); - for (i = 1; i < node.children.length; i++) { - result = this.visit(node.children[1], result); - if (result === null) { - return null; - } - } - return result; - case "IndexExpression": - left = this.visit(node.children[0], value); - right = this.visit(node.children[1], left); - return right; - case "Index": - if (!isArray(value)) { - return null; - } - var index = node.value; - if (index < 0) { - index = value.length + index; - } - result = value[index]; - if (result === undefined) { - result = null; - } - return result; - case "Slice": - if (!isArray(value)) { - return null; - } - var sliceParams = node.children.slice(0); - var computed = this.computeSliceParams(value.length, sliceParams); - var start = computed[0]; - var stop = computed[1]; - var step = computed[2]; - result = []; - if (step > 0) { - for (i = start; i < stop; i += step) { - result.push(value[i]); - } - } else { - for (i = start; i > stop; i += step) { - result.push(value[i]); - } - } - return result; - case "Projection": - // Evaluate left child. - var base = this.visit(node.children[0], value); - if (!isArray(base)) { - return null; - } - collected = []; - for (i = 0; i < base.length; i++) { - current = this.visit(node.children[1], base[i]); - if (current !== null) { - collected.push(current); - } - } - return collected; - case "ValueProjection": - // Evaluate left child. 
- base = this.visit(node.children[0], value); - if (!isObject(base)) { - return null; - } - collected = []; - var values = objValues(base); - for (i = 0; i < values.length; i++) { - current = this.visit(node.children[1], values[i]); - if (current !== null) { - collected.push(current); - } - } - return collected; - case "FilterProjection": - base = this.visit(node.children[0], value); - if (!isArray(base)) { - return null; - } - var filtered = []; - var finalResults = []; - for (i = 0; i < base.length; i++) { - matched = this.visit(node.children[2], base[i]); - if (!isFalse(matched)) { - filtered.push(base[i]); - } - } - for (var j = 0; j < filtered.length; j++) { - current = this.visit(node.children[1], filtered[j]); - if (current !== null) { - finalResults.push(current); - } - } - return finalResults; - case "Comparator": - first = this.visit(node.children[0], value); - second = this.visit(node.children[1], value); - switch(node.name) { - case TOK_EQ: - result = strictDeepEqual(first, second); - break; - case TOK_NE: - result = !strictDeepEqual(first, second); - break; - case TOK_GT: - result = first > second; - break; - case TOK_GTE: - result = first >= second; - break; - case TOK_LT: - result = first < second; - break; - case TOK_LTE: - result = first <= second; - break; - default: - throw new Error("Unknown comparator: " + node.name); - } - return result; - case TOK_FLATTEN: - var original = this.visit(node.children[0], value); - if (!isArray(original)) { - return null; - } - var merged = []; - for (i = 0; i < original.length; i++) { - current = original[i]; - if (isArray(current)) { - merged.push.apply(merged, current); - } else { - merged.push(current); - } - } - return merged; - case "Identity": - return value; - case "MultiSelectList": - if (value === null) { - return null; - } - collected = []; - for (i = 0; i < node.children.length; i++) { - collected.push(this.visit(node.children[i], value)); - } - return collected; - case "MultiSelectHash": - if (value === null) { - return null; - } - collected = {}; - var child; - for (i = 0; i < node.children.length; i++) { - child = node.children[i]; - collected[child.name] = this.visit(child.value, value); - } - return collected; - case "OrExpression": - matched = this.visit(node.children[0], value); - if (isFalse(matched)) { - matched = this.visit(node.children[1], value); - } - return matched; - case "AndExpression": - first = this.visit(node.children[0], value); - - if (isFalse(first) === true) { - return first; - } - return this.visit(node.children[1], value); - case "NotExpression": - first = this.visit(node.children[0], value); - return isFalse(first); - case "Literal": - return node.value; - case TOK_PIPE: - left = this.visit(node.children[0], value); - return this.visit(node.children[1], left); - case TOK_CURRENT: - return value; - case "Function": - var resolvedArgs = []; - for (i = 0; i < node.children.length; i++) { - resolvedArgs.push(this.visit(node.children[i], value)); - } - return this.runtime.callFunction(node.name, resolvedArgs); - case "ExpressionReference": - var refNode = node.children[0]; - // Tag the node with a specific attribute so the type - // checker verify the type. 
- refNode.jmespathType = TOK_EXPREF; - return refNode; - default: - throw new Error("Unknown node type: " + node.type); - } - }, - - computeSliceParams: function(arrayLength, sliceParams) { - var start = sliceParams[0]; - var stop = sliceParams[1]; - var step = sliceParams[2]; - var computed = [null, null, null]; - if (step === null) { - step = 1; - } else if (step === 0) { - var error = new Error("Invalid slice, step cannot be 0"); - error.name = "RuntimeError"; - throw error; - } - var stepValueNegative = step < 0 ? true : false; - - if (start === null) { - start = stepValueNegative ? arrayLength - 1 : 0; - } else { - start = this.capSliceRange(arrayLength, start, step); - } - - if (stop === null) { - stop = stepValueNegative ? -1 : arrayLength; - } else { - stop = this.capSliceRange(arrayLength, stop, step); - } - computed[0] = start; - computed[1] = stop; - computed[2] = step; - return computed; - }, - - capSliceRange: function(arrayLength, actualValue, step) { - if (actualValue < 0) { - actualValue += arrayLength; - if (actualValue < 0) { - actualValue = step < 0 ? -1 : 0; - } - } else if (actualValue >= arrayLength) { - actualValue = step < 0 ? arrayLength - 1 : arrayLength; - } - return actualValue; - } - - }; - - function Runtime(interpreter) { - this._interpreter = interpreter; - this.functionTable = { - // name: [function, ] - // The can be: - // - // { - // args: [[type1, type2], [type1, type2]], - // variadic: true|false - // } - // - // Each arg in the arg list is a list of valid types - // (if the function is overloaded and supports multiple - // types. If the type is "any" then no type checking - // occurs on the argument. Variadic is optional - // and if not provided is assumed to be false. - abs: {_func: this._functionAbs, _signature: [{types: [TYPE_NUMBER]}]}, - avg: {_func: this._functionAvg, _signature: [{types: [TYPE_ARRAY_NUMBER]}]}, - ceil: {_func: this._functionCeil, _signature: [{types: [TYPE_NUMBER]}]}, - contains: { - _func: this._functionContains, - _signature: [{types: [TYPE_STRING, TYPE_ARRAY]}, - {types: [TYPE_ANY]}]}, - "ends_with": { - _func: this._functionEndsWith, - _signature: [{types: [TYPE_STRING]}, {types: [TYPE_STRING]}]}, - floor: {_func: this._functionFloor, _signature: [{types: [TYPE_NUMBER]}]}, - length: { - _func: this._functionLength, - _signature: [{types: [TYPE_STRING, TYPE_ARRAY, TYPE_OBJECT]}]}, - map: { - _func: this._functionMap, - _signature: [{types: [TYPE_EXPREF]}, {types: [TYPE_ARRAY]}]}, - max: { - _func: this._functionMax, - _signature: [{types: [TYPE_ARRAY_NUMBER, TYPE_ARRAY_STRING]}]}, - "merge": { - _func: this._functionMerge, - _signature: [{types: [TYPE_OBJECT], variadic: true}] - }, - "max_by": { - _func: this._functionMaxBy, - _signature: [{types: [TYPE_ARRAY]}, {types: [TYPE_EXPREF]}] - }, - sum: {_func: this._functionSum, _signature: [{types: [TYPE_ARRAY_NUMBER]}]}, - "starts_with": { - _func: this._functionStartsWith, - _signature: [{types: [TYPE_STRING]}, {types: [TYPE_STRING]}]}, - min: { - _func: this._functionMin, - _signature: [{types: [TYPE_ARRAY_NUMBER, TYPE_ARRAY_STRING]}]}, - "min_by": { - _func: this._functionMinBy, - _signature: [{types: [TYPE_ARRAY]}, {types: [TYPE_EXPREF]}] - }, - type: {_func: this._functionType, _signature: [{types: [TYPE_ANY]}]}, - keys: {_func: this._functionKeys, _signature: [{types: [TYPE_OBJECT]}]}, - values: {_func: this._functionValues, _signature: [{types: [TYPE_OBJECT]}]}, - sort: {_func: this._functionSort, _signature: [{types: [TYPE_ARRAY_STRING, TYPE_ARRAY_NUMBER]}]}, - 
"sort_by": { - _func: this._functionSortBy, - _signature: [{types: [TYPE_ARRAY]}, {types: [TYPE_EXPREF]}] - }, - join: { - _func: this._functionJoin, - _signature: [ - {types: [TYPE_STRING]}, - {types: [TYPE_ARRAY_STRING]} - ] - }, - reverse: { - _func: this._functionReverse, - _signature: [{types: [TYPE_STRING, TYPE_ARRAY]}]}, - "to_array": {_func: this._functionToArray, _signature: [{types: [TYPE_ANY]}]}, - "to_string": {_func: this._functionToString, _signature: [{types: [TYPE_ANY]}]}, - "to_number": {_func: this._functionToNumber, _signature: [{types: [TYPE_ANY]}]}, - "not_null": { - _func: this._functionNotNull, - _signature: [{types: [TYPE_ANY], variadic: true}] - } - }; - } - - Runtime.prototype = { - callFunction: function(name, resolvedArgs) { - var functionEntry = this.functionTable[name]; - if (functionEntry === undefined) { - throw new Error("Unknown function: " + name + "()"); - } - this._validateArgs(name, resolvedArgs, functionEntry._signature); - return functionEntry._func.call(this, resolvedArgs); - }, - - _validateArgs: function(name, args, signature) { - // Validating the args requires validating - // the correct arity and the correct type of each arg. - // If the last argument is declared as variadic, then we need - // a minimum number of args to be required. Otherwise it has to - // be an exact amount. - var pluralized; - if (signature[signature.length - 1].variadic) { - if (args.length < signature.length) { - pluralized = signature.length === 1 ? " argument" : " arguments"; - throw new Error("ArgumentError: " + name + "() " + - "takes at least" + signature.length + pluralized + - " but received " + args.length); - } - } else if (args.length !== signature.length) { - pluralized = signature.length === 1 ? " argument" : " arguments"; - throw new Error("ArgumentError: " + name + "() " + - "takes " + signature.length + pluralized + - " but received " + args.length); - } - var currentSpec; - var actualType; - var typeMatched; - for (var i = 0; i < signature.length; i++) { - typeMatched = false; - currentSpec = signature[i].types; - actualType = this._getTypeName(args[i]); - for (var j = 0; j < currentSpec.length; j++) { - if (this._typeMatches(actualType, currentSpec[j], args[i])) { - typeMatched = true; - break; - } - } - if (!typeMatched) { - var expected = currentSpec - .map(function(typeIdentifier) { - return TYPE_NAME_TABLE[typeIdentifier]; - }) - .join(','); - throw new Error("TypeError: " + name + "() " + - "expected argument " + (i + 1) + - " to be type " + expected + - " but received type " + - TYPE_NAME_TABLE[actualType] + " instead."); - } - } - }, - - _typeMatches: function(actual, expected, argValue) { - if (expected === TYPE_ANY) { - return true; - } - if (expected === TYPE_ARRAY_STRING || - expected === TYPE_ARRAY_NUMBER || - expected === TYPE_ARRAY) { - // The expected type can either just be array, - // or it can require a specific subtype (array of numbers). - // - // The simplest case is if "array" with no subtype is specified. - if (expected === TYPE_ARRAY) { - return actual === TYPE_ARRAY; - } else if (actual === TYPE_ARRAY) { - // Otherwise we need to check subtypes. - // I think this has potential to be improved. 
- var subtype; - if (expected === TYPE_ARRAY_NUMBER) { - subtype = TYPE_NUMBER; - } else if (expected === TYPE_ARRAY_STRING) { - subtype = TYPE_STRING; - } - for (var i = 0; i < argValue.length; i++) { - if (!this._typeMatches( - this._getTypeName(argValue[i]), subtype, - argValue[i])) { - return false; - } - } - return true; - } - } else { - return actual === expected; - } - }, - _getTypeName: function(obj) { - switch (Object.prototype.toString.call(obj)) { - case "[object String]": - return TYPE_STRING; - case "[object Number]": - return TYPE_NUMBER; - case "[object Array]": - return TYPE_ARRAY; - case "[object Boolean]": - return TYPE_BOOLEAN; - case "[object Null]": - return TYPE_NULL; - case "[object Object]": - // Check if it's an expref. If it has, it's been - // tagged with a jmespathType attr of 'Expref'; - if (obj.jmespathType === TOK_EXPREF) { - return TYPE_EXPREF; - } else { - return TYPE_OBJECT; - } - } - }, - - _functionStartsWith: function(resolvedArgs) { - return resolvedArgs[0].lastIndexOf(resolvedArgs[1]) === 0; - }, - - _functionEndsWith: function(resolvedArgs) { - var searchStr = resolvedArgs[0]; - var suffix = resolvedArgs[1]; - return searchStr.indexOf(suffix, searchStr.length - suffix.length) !== -1; - }, - - _functionReverse: function(resolvedArgs) { - var typeName = this._getTypeName(resolvedArgs[0]); - if (typeName === TYPE_STRING) { - var originalStr = resolvedArgs[0]; - var reversedStr = ""; - for (var i = originalStr.length - 1; i >= 0; i--) { - reversedStr += originalStr[i]; - } - return reversedStr; - } else { - var reversedArray = resolvedArgs[0].slice(0); - reversedArray.reverse(); - return reversedArray; - } - }, - - _functionAbs: function(resolvedArgs) { - return Math.abs(resolvedArgs[0]); - }, - - _functionCeil: function(resolvedArgs) { - return Math.ceil(resolvedArgs[0]); - }, - - _functionAvg: function(resolvedArgs) { - var sum = 0; - var inputArray = resolvedArgs[0]; - for (var i = 0; i < inputArray.length; i++) { - sum += inputArray[i]; - } - return sum / inputArray.length; - }, - - _functionContains: function(resolvedArgs) { - return resolvedArgs[0].indexOf(resolvedArgs[1]) >= 0; - }, - - _functionFloor: function(resolvedArgs) { - return Math.floor(resolvedArgs[0]); - }, - - _functionLength: function(resolvedArgs) { - if (!isObject(resolvedArgs[0])) { - return resolvedArgs[0].length; - } else { - // As far as I can tell, there's no way to get the length - // of an object without O(n) iteration through the object. 
- return Object.keys(resolvedArgs[0]).length; - } - }, - - _functionMap: function(resolvedArgs) { - var mapped = []; - var interpreter = this._interpreter; - var exprefNode = resolvedArgs[0]; - var elements = resolvedArgs[1]; - for (var i = 0; i < elements.length; i++) { - mapped.push(interpreter.visit(exprefNode, elements[i])); - } - return mapped; - }, - - _functionMerge: function(resolvedArgs) { - var merged = {}; - for (var i = 0; i < resolvedArgs.length; i++) { - var current = resolvedArgs[i]; - for (var key in current) { - merged[key] = current[key]; - } - } - return merged; - }, - - _functionMax: function(resolvedArgs) { - if (resolvedArgs[0].length > 0) { - var typeName = this._getTypeName(resolvedArgs[0][0]); - if (typeName === TYPE_NUMBER) { - return Math.max.apply(Math, resolvedArgs[0]); - } else { - var elements = resolvedArgs[0]; - var maxElement = elements[0]; - for (var i = 1; i < elements.length; i++) { - if (maxElement.localeCompare(elements[i]) < 0) { - maxElement = elements[i]; - } - } - return maxElement; - } - } else { - return null; - } - }, - - _functionMin: function(resolvedArgs) { - if (resolvedArgs[0].length > 0) { - var typeName = this._getTypeName(resolvedArgs[0][0]); - if (typeName === TYPE_NUMBER) { - return Math.min.apply(Math, resolvedArgs[0]); - } else { - var elements = resolvedArgs[0]; - var minElement = elements[0]; - for (var i = 1; i < elements.length; i++) { - if (elements[i].localeCompare(minElement) < 0) { - minElement = elements[i]; - } - } - return minElement; - } - } else { - return null; - } - }, - - _functionSum: function(resolvedArgs) { - var sum = 0; - var listToSum = resolvedArgs[0]; - for (var i = 0; i < listToSum.length; i++) { - sum += listToSum[i]; - } - return sum; - }, - - _functionType: function(resolvedArgs) { - switch (this._getTypeName(resolvedArgs[0])) { - case TYPE_NUMBER: - return "number"; - case TYPE_STRING: - return "string"; - case TYPE_ARRAY: - return "array"; - case TYPE_OBJECT: - return "object"; - case TYPE_BOOLEAN: - return "boolean"; - case TYPE_EXPREF: - return "expref"; - case TYPE_NULL: - return "null"; - } - }, - - _functionKeys: function(resolvedArgs) { - return Object.keys(resolvedArgs[0]); - }, - - _functionValues: function(resolvedArgs) { - var obj = resolvedArgs[0]; - var keys = Object.keys(obj); - var values = []; - for (var i = 0; i < keys.length; i++) { - values.push(obj[keys[i]]); - } - return values; - }, - - _functionJoin: function(resolvedArgs) { - var joinChar = resolvedArgs[0]; - var listJoin = resolvedArgs[1]; - return listJoin.join(joinChar); - }, - - _functionToArray: function(resolvedArgs) { - if (this._getTypeName(resolvedArgs[0]) === TYPE_ARRAY) { - return resolvedArgs[0]; - } else { - return [resolvedArgs[0]]; - } - }, - - _functionToString: function(resolvedArgs) { - if (this._getTypeName(resolvedArgs[0]) === TYPE_STRING) { - return resolvedArgs[0]; - } else { - return JSON.stringify(resolvedArgs[0]); - } - }, - - _functionToNumber: function(resolvedArgs) { - var typeName = this._getTypeName(resolvedArgs[0]); - var convertedValue; - if (typeName === TYPE_NUMBER) { - return resolvedArgs[0]; - } else if (typeName === TYPE_STRING) { - convertedValue = +resolvedArgs[0]; - if (!isNaN(convertedValue)) { - return convertedValue; - } - } - return null; - }, - - _functionNotNull: function(resolvedArgs) { - for (var i = 0; i < resolvedArgs.length; i++) { - if (this._getTypeName(resolvedArgs[i]) !== TYPE_NULL) { - return resolvedArgs[i]; - } - } - return null; - }, - - _functionSort: 
function(resolvedArgs) { - var sortedArray = resolvedArgs[0].slice(0); - sortedArray.sort(); - return sortedArray; - }, - - _functionSortBy: function(resolvedArgs) { - var sortedArray = resolvedArgs[0].slice(0); - if (sortedArray.length === 0) { - return sortedArray; - } - var interpreter = this._interpreter; - var exprefNode = resolvedArgs[1]; - var requiredType = this._getTypeName( - interpreter.visit(exprefNode, sortedArray[0])); - if ([TYPE_NUMBER, TYPE_STRING].indexOf(requiredType) < 0) { - throw new Error("TypeError"); - } - var that = this; - // In order to get a stable sort out of an unstable - // sort algorithm, we decorate/sort/undecorate (DSU) - // by creating a new list of [index, element] pairs. - // In the cmp function, if the evaluated elements are - // equal, then the index will be used as the tiebreaker. - // After the decorated list has been sorted, it will be - // undecorated to extract the original elements. - var decorated = []; - for (var i = 0; i < sortedArray.length; i++) { - decorated.push([i, sortedArray[i]]); - } - decorated.sort(function(a, b) { - var exprA = interpreter.visit(exprefNode, a[1]); - var exprB = interpreter.visit(exprefNode, b[1]); - if (that._getTypeName(exprA) !== requiredType) { - throw new Error( - "TypeError: expected " + requiredType + ", received " + - that._getTypeName(exprA)); - } else if (that._getTypeName(exprB) !== requiredType) { - throw new Error( - "TypeError: expected " + requiredType + ", received " + - that._getTypeName(exprB)); - } - if (exprA > exprB) { - return 1; - } else if (exprA < exprB) { - return -1; - } else { - // If they're equal compare the items by their - // order to maintain relative order of equal keys - // (i.e. to get a stable sort). - return a[0] - b[0]; - } - }); - // Undecorate: extract out the original list elements. 
-      for (var j = 0; j < decorated.length; j++) {
-        sortedArray[j] = decorated[j][1];
-      }
-      return sortedArray;
-    },
-
-    _functionMaxBy: function(resolvedArgs) {
-      var exprefNode = resolvedArgs[1];
-      var resolvedArray = resolvedArgs[0];
-      var keyFunction = this.createKeyFunction(exprefNode, [TYPE_NUMBER, TYPE_STRING]);
-      var maxNumber = -Infinity;
-      var maxRecord;
-      var current;
-      for (var i = 0; i < resolvedArray.length; i++) {
-        current = keyFunction(resolvedArray[i]);
-        if (current > maxNumber) {
-          maxNumber = current;
-          maxRecord = resolvedArray[i];
-        }
-      }
-      return maxRecord;
-    },
-
-    _functionMinBy: function(resolvedArgs) {
-      var exprefNode = resolvedArgs[1];
-      var resolvedArray = resolvedArgs[0];
-      var keyFunction = this.createKeyFunction(exprefNode, [TYPE_NUMBER, TYPE_STRING]);
-      var minNumber = Infinity;
-      var minRecord;
-      var current;
-      for (var i = 0; i < resolvedArray.length; i++) {
-        current = keyFunction(resolvedArray[i]);
-        if (current < minNumber) {
-          minNumber = current;
-          minRecord = resolvedArray[i];
-        }
-      }
-      return minRecord;
-    },
-
-    createKeyFunction: function(exprefNode, allowedTypes) {
-      var that = this;
-      var interpreter = this._interpreter;
-      var keyFunc = function(x) {
-        var current = interpreter.visit(exprefNode, x);
-        if (allowedTypes.indexOf(that._getTypeName(current)) < 0) {
-          var msg = "TypeError: expected one of " + allowedTypes +
-            ", received " + that._getTypeName(current);
-          throw new Error(msg);
-        }
-        return current;
-      };
-      return keyFunc;
-    }
-
-  };
-
-  function compile(stream) {
-    var parser = new Parser();
-    var ast = parser.parse(stream);
-    return ast;
-  }
-
-  function tokenize(stream) {
-    var lexer = new Lexer();
-    return lexer.tokenize(stream);
-  }
-
-  function search(data, expression) {
-    var parser = new Parser();
-    // This needs to be improved. Both the interpreter and runtime depend on
-    // each other. The runtime needs the interpreter to support exprefs.
-    // There's likely a clean way to avoid the cyclic dependency.
-    var runtime = new Runtime();
-    var interpreter = new TreeInterpreter(runtime);
-    runtime._interpreter = interpreter;
-    var node = parser.parse(expression);
-    return interpreter.search(node, data);
-  }
-
-  exports.tokenize = tokenize;
-  exports.compile = compile;
-  exports.search = search;
-  exports.strictDeepEqual = strictDeepEqual;
-})( false ? 0 : exports);
-
-
-/***/ }),
-
-/***/ 80900:
-/***/ ((module) => {
-
-/**
- * Helpers.
- */
-
-var s = 1000;
-var m = s * 60;
-var h = m * 60;
-var d = h * 24;
-var w = d * 7;
-var y = d * 365.25;
-
-/**
- * Parse or format the given `val`.
- *
- * Options:
- *
- *  - `long` verbose formatting [false]
- *
- * @param {String|Number} val
- * @param {Object} [options]
- * @throws {Error} throw an error if val is not a non-empty string or a number
- * @return {String|Number}
- * @api public
- */
-
-module.exports = function(val, options) {
-  options = options || {};
-  var type = typeof val;
-  if (type === 'string' && val.length > 0) {
-    return parse(val);
-  } else if (type === 'number' && isFinite(val)) {
-    return options.long ? fmtLong(val) : fmtShort(val);
-  }
-  throw new Error(
-    'val is not a non-empty string or a valid number. val=' +
-      JSON.stringify(val)
-  );
-};
-
-/**
- * Parse the given `str` and return milliseconds.
- *
- * @param {String} str
- * @return {Number}
- * @api private
- */
-
-function parse(str) {
-  str = String(str);
-  if (str.length > 100) {
-    return;
-  }
-  var match = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec(
-    str
-  );
-  if (!match) {
-    return;
-  }
-  var n = parseFloat(match[1]);
-  var type = (match[2] || 'ms').toLowerCase();
-  switch (type) {
-    case 'years':
-    case 'year':
-    case 'yrs':
-    case 'yr':
-    case 'y':
-      return n * y;
-    case 'weeks':
-    case 'week':
-    case 'w':
-      return n * w;
-    case 'days':
-    case 'day':
-    case 'd':
-      return n * d;
-    case 'hours':
-    case 'hour':
-    case 'hrs':
-    case 'hr':
-    case 'h':
-      return n * h;
-    case 'minutes':
-    case 'minute':
-    case 'mins':
-    case 'min':
-    case 'm':
-      return n * m;
-    case 'seconds':
-    case 'second':
-    case 'secs':
-    case 'sec':
-    case 's':
-      return n * s;
-    case 'milliseconds':
-    case 'millisecond':
-    case 'msecs':
-    case 'msec':
-    case 'ms':
-      return n;
-    default:
-      return undefined;
-  }
-}
-
-/**
- * Short format for `ms`.
- *
- * @param {Number} ms
- * @return {String}
- * @api private
- */
-
-function fmtShort(ms) {
-  var msAbs = Math.abs(ms);
-  if (msAbs >= d) {
-    return Math.round(ms / d) + 'd';
-  }
-  if (msAbs >= h) {
-    return Math.round(ms / h) + 'h';
-  }
-  if (msAbs >= m) {
-    return Math.round(ms / m) + 'm';
-  }
-  if (msAbs >= s) {
-    return Math.round(ms / s) + 's';
-  }
-  return ms + 'ms';
-}
-
-/**
- * Long format for `ms`.
- *
- * @param {Number} ms
- * @return {String}
- * @api private
- */
-
-function fmtLong(ms) {
-  var msAbs = Math.abs(ms);
-  if (msAbs >= d) {
-    return plural(ms, msAbs, d, 'day');
-  }
-  if (msAbs >= h) {
-    return plural(ms, msAbs, h, 'hour');
-  }
-  if (msAbs >= m) {
-    return plural(ms, msAbs, m, 'minute');
-  }
-  if (msAbs >= s) {
-    return plural(ms, msAbs, s, 'second');
-  }
-  return ms + ' ms';
-}
-
-/**
- * Pluralization helper.
- */
-
-function plural(ms, msAbs, n, name) {
-  var isPlural = msAbs >= n * 1.5;
-  return Math.round(ms / n) + ' ' + name + (isPlural ? 's' : '');
-}
-
-
-/***/ }),
-
-/***/ 72043:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
-
-;(function (sax) { // wrapper for non-node envs
-  sax.parser = function (strict, opt) { return new SAXParser(strict, opt) }
-  sax.SAXParser = SAXParser
-  sax.SAXStream = SAXStream
-  sax.createStream = createStream
-
-  // When we pass the MAX_BUFFER_LENGTH position, start checking for buffer overruns.
-  // When we check, schedule the next check for MAX_BUFFER_LENGTH - (max(buffer lengths)),
-  // since that's the earliest that a buffer overrun could occur. This way, checks are
-  // as rare as required, but as often as necessary to ensure never crossing this bound.
-  // Furthermore, buffers are only tested at most once per write(), so passing a very
-  // large string into write() might have undesirable effects, but this is manageable by
-  // the caller, so it is assumed to be safe. Thus, a call to write() may, in the extreme
-  // edge case, result in creating at most one complete copy of the string passed in.
-  // Set to Infinity to have unlimited buffers.
-  sax.MAX_BUFFER_LENGTH = 64 * 1024
-
-  var buffers = [
-    'comment', 'sgmlDecl', 'textNode', 'tagName', 'doctype',
-    'procInstName', 'procInstBody', 'entity', 'attribName',
-    'attribValue', 'cdata', 'script'
-  ]
-
-  sax.EVENTS = [
-    'text',
-    'processinginstruction',
-    'sgmldeclaration',
-    'doctype',
-    'comment',
-    'opentagstart',
-    'attribute',
-    'opentag',
-    'closetag',
-    'opencdata',
-    'cdata',
-    'closecdata',
-    'error',
-    'end',
-    'ready',
-    'script',
-    'opennamespace',
-    'closenamespace'
-  ]
-
-  function SAXParser (strict, opt) {
-    if (!(this instanceof SAXParser)) {
-      return new SAXParser(strict, opt)
-    }
-
-    var parser = this
-    clearBuffers(parser)
-    parser.q = parser.c = ''
-    parser.bufferCheckPosition = sax.MAX_BUFFER_LENGTH
-    parser.opt = opt || {}
-    parser.opt.lowercase = parser.opt.lowercase || parser.opt.lowercasetags
-    parser.looseCase = parser.opt.lowercase ? 'toLowerCase' : 'toUpperCase'
-    parser.tags = []
-    parser.closed = parser.closedRoot = parser.sawRoot = false
-    parser.tag = parser.error = null
-    parser.strict = !!strict
-    parser.noscript = !!(strict || parser.opt.noscript)
-    parser.state = S.BEGIN
-    parser.strictEntities = parser.opt.strictEntities
-    parser.ENTITIES = parser.strictEntities ? Object.create(sax.XML_ENTITIES) : Object.create(sax.ENTITIES)
-    parser.attribList = []
-
-    // namespaces form a prototype chain.
-    // it always points at the current tag,
-    // which protos to its parent tag.
-    if (parser.opt.xmlns) {
-      parser.ns = Object.create(rootNS)
-    }
-
-    // mostly just for error reporting
-    parser.trackPosition = parser.opt.position !== false
-    if (parser.trackPosition) {
-      parser.position = parser.line = parser.column = 0
-    }
-    emit(parser, 'onready')
-  }
-
-  if (!Object.create) {
-    Object.create = function (o) {
-      function F () {}
-      F.prototype = o
-      var newf = new F()
-      return newf
-    }
-  }
-
-  if (!Object.keys) {
-    Object.keys = function (o) {
-      var a = []
-      for (var i in o) if (o.hasOwnProperty(i)) a.push(i)
-      return a
-    }
-  }
-
-  function checkBufferLength (parser) {
-    var maxAllowed = Math.max(sax.MAX_BUFFER_LENGTH, 10)
-    var maxActual = 0
-    for (var i = 0, l = buffers.length; i < l; i++) {
-      var len = parser[buffers[i]].length
-      if (len > maxAllowed) {
-        // Text/cdata nodes can get big, and since they're buffered,
-        // we can get here under normal conditions.
-        // Avoid issues by emitting the text node now,
-        // so at least it won't get any bigger.
-        switch (buffers[i]) {
-          case 'textNode':
-            closeText(parser)
-            break
-
-          case 'cdata':
-            emitNode(parser, 'oncdata', parser.cdata)
-            parser.cdata = ''
-            break
-
-          case 'script':
-            emitNode(parser, 'onscript', parser.script)
-            parser.script = ''
-            break
-
-          default:
-            error(parser, 'Max buffer length exceeded: ' + buffers[i])
-        }
-      }
-      maxActual = Math.max(maxActual, len)
-    }
-    // schedule the next check for the earliest possible buffer overrun.
-    var m = sax.MAX_BUFFER_LENGTH - maxActual
-    parser.bufferCheckPosition = m + parser.position
-  }
-
-  function clearBuffers (parser) {
-    for (var i = 0, l = buffers.length; i < l; i++) {
-      parser[buffers[i]] = ''
-    }
-  }
-
-  function flushBuffers (parser) {
-    closeText(parser)
-    if (parser.cdata !== '') {
-      emitNode(parser, 'oncdata', parser.cdata)
-      parser.cdata = ''
-    }
-    if (parser.script !== '') {
-      emitNode(parser, 'onscript', parser.script)
-      parser.script = ''
-    }
-  }
-
-  SAXParser.prototype = {
-    end: function () { end(this) },
-    write: write,
-    resume: function () { this.error = null; return this },
-    close: function () { return this.write(null) },
-    flush: function () { flushBuffers(this) }
-  }
-
-  var Stream
-  try {
-    Stream = (__nccwpck_require__(12781).Stream)
-  } catch (ex) {
-    Stream = function () {}
-  }
-
-  var streamWraps = sax.EVENTS.filter(function (ev) {
-    return ev !== 'error' && ev !== 'end'
-  })
-
-  function createStream (strict, opt) {
-    return new SAXStream(strict, opt)
-  }
-
-  function SAXStream (strict, opt) {
-    if (!(this instanceof SAXStream)) {
-      return new SAXStream(strict, opt)
-    }
-
-    Stream.apply(this)
-
-    this._parser = new SAXParser(strict, opt)
-    this.writable = true
-    this.readable = true
-
-    var me = this
-
-    this._parser.onend = function () {
-      me.emit('end')
-    }
-
-    this._parser.onerror = function (er) {
-      me.emit('error', er)
-
-      // if didn't throw, then means error was handled.
-      // go ahead and clear error, so we can write again.
-      me._parser.error = null
-    }
-
-    this._decoder = null
-
-    streamWraps.forEach(function (ev) {
-      Object.defineProperty(me, 'on' + ev, {
-        get: function () {
-          return me._parser['on' + ev]
-        },
-        set: function (h) {
-          if (!h) {
-            me.removeAllListeners(ev)
-            me._parser['on' + ev] = h
-            return h
-          }
-          me.on(ev, h)
-        },
-        enumerable: true,
-        configurable: false
-      })
-    })
-  }
-
-  SAXStream.prototype = Object.create(Stream.prototype, {
-    constructor: {
-      value: SAXStream
-    }
-  })
-
-  SAXStream.prototype.write = function (data) {
-    if (typeof Buffer === 'function' &&
-      typeof Buffer.isBuffer === 'function' &&
-      Buffer.isBuffer(data)) {
-      if (!this._decoder) {
-        var SD = (__nccwpck_require__(71576).StringDecoder)
-        this._decoder = new SD('utf8')
-      }
-      data = this._decoder.write(data)
-    }
-
-    this._parser.write(data.toString())
-    this.emit('data', data)
-    return true
-  }
-
-  SAXStream.prototype.end = function (chunk) {
-    if (chunk && chunk.length) {
-      this.write(chunk)
-    }
-    this._parser.end()
-    return true
-  }
-
-  SAXStream.prototype.on = function (ev, handler) {
-    var me = this
-    if (!me._parser['on' + ev] && streamWraps.indexOf(ev) !== -1) {
-      me._parser['on' + ev] = function () {
-        var args = arguments.length === 1 ? [arguments[0]] : Array.apply(null, arguments)
-        args.splice(0, 0, ev)
-        me.emit.apply(me, args)
-      }
-    }
-
-    return Stream.prototype.on.call(me, ev, handler)
-  }
-
-  // character classes and tokens
-  var whitespace = '\r\n\t '
-
-  // this really needs to be replaced with character classes.
-  // XML allows all manner of ridiculous numbers and digits.
-  var number = '0124356789'
-  var letter = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
-
-  // (Letter | "_" | ":")
-  var quote = '\'"'
-  var attribEnd = whitespace + '>'
-  var CDATA = '[CDATA['
-  var DOCTYPE = 'DOCTYPE'
-  var XML_NAMESPACE = 'http://www.w3.org/XML/1998/namespace'
-  var XMLNS_NAMESPACE = 'http://www.w3.org/2000/xmlns/'
-  var rootNS = { xml: XML_NAMESPACE, xmlns: XMLNS_NAMESPACE }
-
-  // turn all the string character sets into character class objects.
-  whitespace = charClass(whitespace)
-  number = charClass(number)
-  letter = charClass(letter)
-
-  // http://www.w3.org/TR/REC-xml/#NT-NameStartChar
-  // This implementation works on strings, a single character at a time
-  // as such, it cannot ever support astral-plane characters (10000-EFFFF)
-  // without a significant breaking change to either this parser, or the
-  // JavaScript language. Implementation of an emoji-capable xml parser
-  // is left as an exercise for the reader.
-  var nameStart = /[:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/
-
-  var nameBody = /[:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\u00B7\u0300-\u036F\u203F-\u2040\.\d-]/
-
-  var entityStart = /[#:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/
-  var entityBody = /[#:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\u00B7\u0300-\u036F\u203F-\u2040\.\d-]/
-
-  quote = charClass(quote)
-  attribEnd = charClass(attribEnd)
-
-  function charClass (str) {
-    return str.split('').reduce(function (s, c) {
-      s[c] = true
-      return s
-    }, {})
-  }
-
-  function isRegExp (c) {
-    return Object.prototype.toString.call(c) === '[object RegExp]'
-  }
-
-  function is (charclass, c) {
-    return isRegExp(charclass) ? !!c.match(charclass) : charclass[c]
-  }
-
-  function not (charclass, c) {
-    return !is(charclass, c)
-  }
-
-  var S = 0
-  sax.STATE = {
-    BEGIN: S++, // leading byte order mark or whitespace
-    BEGIN_WHITESPACE: S++, // leading whitespace
-    TEXT: S++, // general stuff
-    TEXT_ENTITY: S++, // &amp; and such.
-    OPEN_WAKA: S++, // <
-    SGML_DECL: S++, //
-    SCRIPT: S++, //