From 1f4c7eaa362d99d5ca3eb97601810c9644712259 Mon Sep 17 00:00:00 2001
From: Brian DeHamer <bdehamer@github.com>
Date: Thu, 9 Feb 2023 12:11:27 -0800
Subject: [PATCH 1/2] feat: add provenance attestation

This adds a new `--provenance` flag to npm for provenance attestation
during `npm publish`

If set to `true`, npm will detect whether it is running in GitHub Actions
and will generate an appropriate attestation bundle for that environment.

The primary work in this PR was done by
[@bdehamer](https://github.com/bdehamer), with some cleanup and
edge-case handling added by the npm cli team.
---
 DEPENDENCIES.md                               |    6 +
 lib/commands/publish.js                       |    1 +
 lib/utils/config/definitions.js               |    9 +
 node_modules/.gitignore                       |    2 +
 node_modules/sigstore/LICENSE                 |  202 +++
 node_modules/sigstore/bin/sigstore.js         |   17 +
 node_modules/sigstore/dist/ca/format.d.ts     |    5 +
 node_modules/sigstore/dist/ca/format.js       |   14 +
 node_modules/sigstore/dist/ca/index.d.ts      |   14 +
 node_modules/sigstore/dist/ca/index.js        |   23 +
 .../sigstore/dist/ca/verify/chain.d.ts        |    3 +
 node_modules/sigstore/dist/ca/verify/chain.js |   64 +
 .../sigstore/dist/ca/verify/index.d.ts        |    2 +
 node_modules/sigstore/dist/ca/verify/index.js |   21 +
 node_modules/sigstore/dist/ca/verify/sct.d.ts |    3 +
 node_modules/sigstore/dist/ca/verify/sct.js   |   30 +
 .../sigstore/dist/ca/verify/signer.d.ts       |    3 +
 .../sigstore/dist/ca/verify/signer.js         |  131 ++
 node_modules/sigstore/dist/cli/index.d.ts     |    1 +
 node_modules/sigstore/dist/cli/index.js       |  113 ++
 node_modules/sigstore/dist/client/error.d.ts  |   10 +
 node_modules/sigstore/dist/client/error.js    |   21 +
 node_modules/sigstore/dist/client/fulcio.d.ts |   18 +
 node_modules/sigstore/dist/client/fulcio.js   |   53 +
 node_modules/sigstore/dist/client/index.d.ts  |    2 +
 node_modules/sigstore/dist/client/index.js    |   22 +
 node_modules/sigstore/dist/client/rekor.d.ts  |   45 +
 node_modules/sigstore/dist/client/rekor.js    |  115 ++
 node_modules/sigstore/dist/error.d.ts         |   13 +
 node_modules/sigstore/dist/error.js           |   38 +
 node_modules/sigstore/dist/identity/ci.d.ts   |   11 +
 node_modules/sigstore/dist/identity/ci.js     |   65 +
 .../sigstore/dist/identity/index.d.ts         |   25 +
 node_modules/sigstore/dist/identity/index.js  |   46 +
 .../sigstore/dist/identity/issuer.d.ts        |   15 +
 node_modules/sigstore/dist/identity/issuer.js |   53 +
 .../sigstore/dist/identity/oauth.d.ts         |   19 +
 node_modules/sigstore/dist/identity/oauth.js  |  188 +++
 .../sigstore/dist/identity/provider.d.ts      |    3 +
 .../sigstore/dist/identity/provider.js        |    2 +
 node_modules/sigstore/dist/index.d.ts         |    1 +
 node_modules/sigstore/dist/index.js           |   42 +
 node_modules/sigstore/dist/merkle/digest.d.ts |    8 +
 node_modules/sigstore/dist/merkle/digest.js   |   48 +
 node_modules/sigstore/dist/merkle/index.d.ts  |    2 +
 node_modules/sigstore/dist/merkle/index.js    |   22 +
 node_modules/sigstore/dist/merkle/verify.d.ts |    3 +
 node_modules/sigstore/dist/merkle/verify.js   |   78 +
 node_modules/sigstore/dist/sign.d.ts          |   23 +
 node_modules/sigstore/dist/sign.js            |   73 +
 .../sigstore/dist/sigstore-utils.d.ts         |    7 +
 node_modules/sigstore/dist/sigstore-utils.js  |   58 +
 node_modules/sigstore/dist/sigstore.d.ts      |   30 +
 node_modules/sigstore/dist/sigstore.js        |  188 +++
 node_modules/sigstore/dist/tlog/format.d.ts   |    6 +
 node_modules/sigstore/dist/tlog/format.js     |  105 ++
 node_modules/sigstore/dist/tlog/index.d.ts    |   21 +
 node_modules/sigstore/dist/tlog/index.js      |   67 +
 .../types/__generated__/hashedrekord.d.ts     |   50 +
 .../tlog/types/__generated__/hashedrekord.js  |    8 +
 .../dist/tlog/types/__generated__/intoto.d.ts |  131 ++
 .../dist/tlog/types/__generated__/intoto.js   |    8 +
 .../sigstore/dist/tlog/types/index.d.ts       |   38 +
 .../sigstore/dist/tlog/types/index.js         |    5 +
 .../sigstore/dist/tlog/verify/body.d.ts       |    2 +
 .../sigstore/dist/tlog/verify/body.js         |  113 ++
 .../sigstore/dist/tlog/verify/index.d.ts      |    2 +
 .../sigstore/dist/tlog/verify/index.js        |   75 +
 .../sigstore/dist/tlog/verify/set.d.ts        |    2 +
 node_modules/sigstore/dist/tlog/verify/set.js |   67 +
 node_modules/sigstore/dist/tuf/index.d.ts     |    2 +
 node_modules/sigstore/dist/tuf/index.js       |   76 +
 node_modules/sigstore/dist/tuf/trustroot.d.ts |   11 +
 node_modules/sigstore/dist/tuf/trustroot.js   |  163 ++
 .../sigstore/dist/types/signature.d.ts        |   16 +
 node_modules/sigstore/dist/types/signature.js |   15 +
 .../sigstore/__generated__/envelope.d.ts      |   46 +
 .../types/sigstore/__generated__/envelope.js  |   89 ++
 .../google/api/field_behavior.d.ts            |   52 +
 .../google/api/field_behavior.js              |  119 ++
 .../google/protobuf/descriptor.d.ts           |  939 ++++++++++++
 .../google/protobuf/descriptor.js             | 1308 +++++++++++++++++
 .../google/protobuf/timestamp.d.ts            |  110 ++
 .../google/protobuf/timestamp.js              |   24 +
 .../__generated__/sigstore_bundle.d.ts        |   72 +
 .../sigstore/__generated__/sigstore_bundle.js |  106 ++
 .../__generated__/sigstore_common.d.ts        |  228 +++
 .../sigstore/__generated__/sigstore_common.js |  457 ++++++
 .../__generated__/sigstore_rekor.d.ts         |  129 ++
 .../sigstore/__generated__/sigstore_rekor.js  |  167 +++
 .../__generated__/sigstore_trustroot.d.ts     |   89 ++
 .../__generated__/sigstore_trustroot.js       |  103 ++
 .../__generated__/sigstore_verification.d.ts  |  156 ++
 .../__generated__/sigstore_verification.js    |  273 ++++
 .../sigstore/dist/types/sigstore/index.d.ts   |   46 +
 .../sigstore/dist/types/sigstore/index.js     |  144 ++
 .../dist/types/sigstore/serialized.d.ts       |   74 +
 .../dist/types/sigstore/serialized.js         |    2 +
 .../dist/types/sigstore/validate.d.ts         |   16 +
 .../sigstore/dist/types/sigstore/validate.js  |   88 ++
 node_modules/sigstore/dist/types/utility.d.ts |   14 +
 node_modules/sigstore/dist/types/utility.js   |   18 +
 node_modules/sigstore/dist/util/crypto.d.ts   |   10 +
 node_modules/sigstore/dist/util/crypto.js     |   64 +
 node_modules/sigstore/dist/util/dsse.d.ts     |    2 +
 node_modules/sigstore/dist/util/dsse.js       |   25 +
 node_modules/sigstore/dist/util/encoding.d.ts |    6 +
 node_modules/sigstore/dist/util/encoding.js   |   46 +
 node_modules/sigstore/dist/util/index.d.ts    |    8 +
 node_modules/sigstore/dist/util/index.js      |   49 +
 node_modules/sigstore/dist/util/json.d.ts     |    1 +
 node_modules/sigstore/dist/util/json.js       |   61 +
 node_modules/sigstore/dist/util/oidc.d.ts     |    1 +
 node_modules/sigstore/dist/util/oidc.js       |   54 +
 node_modules/sigstore/dist/util/pem.d.ts      |    4 +
 node_modules/sigstore/dist/util/pem.js        |   65 +
 node_modules/sigstore/dist/util/promise.d.ts  |    1 +
 node_modules/sigstore/dist/util/promise.js    |   27 +
 node_modules/sigstore/dist/util/stream.d.ts   |   24 +
 node_modules/sigstore/dist/util/stream.js     |  116 ++
 node_modules/sigstore/dist/util/ua.d.ts       |    1 +
 node_modules/sigstore/dist/util/ua.js         |   33 +
 node_modules/sigstore/dist/verify.d.ts        |   13 +
 node_modules/sigstore/dist/verify.js          |  142 ++
 .../sigstore/dist/x509/asn1/dump.d.ts         |    2 +
 node_modules/sigstore/dist/x509/asn1/dump.js  |   97 ++
 .../sigstore/dist/x509/asn1/error.d.ts        |    4 +
 node_modules/sigstore/dist/x509/asn1/error.js |   24 +
 .../sigstore/dist/x509/asn1/length.d.ts       |    4 +
 .../sigstore/dist/x509/asn1/length.js         |   63 +
 node_modules/sigstore/dist/x509/asn1/obj.d.ts |   18 +
 node_modules/sigstore/dist/x509/asn1/obj.js   |  166 +++
 .../sigstore/dist/x509/asn1/parse.d.ts        |    7 +
 node_modules/sigstore/dist/x509/asn1/parse.js |  125 ++
 node_modules/sigstore/dist/x509/asn1/tag.d.ts |   28 +
 node_modules/sigstore/dist/x509/asn1/tag.js   |   86 ++
 node_modules/sigstore/dist/x509/cert.d.ts     |   48 +
 node_modules/sigstore/dist/x509/cert.js       |  241 +++
 node_modules/sigstore/dist/x509/ext.d.ts      |   41 +
 node_modules/sigstore/dist/x509/ext.js        |  157 ++
 node_modules/sigstore/dist/x509/sct.d.ts      |   26 +
 node_modules/sigstore/dist/x509/sct.js        |  101 ++
 node_modules/sigstore/dist/x509/verify.d.ts   |    8 +
 node_modules/sigstore/dist/x509/verify.js     |  159 ++
 node_modules/sigstore/package.json            |   59 +
 node_modules/sigstore/store/map.json          |   19 +
 .../store/public-good-instance-root.json      |  156 ++
 node_modules/tuf-js/LICENSE                   |   21 +
 node_modules/tuf-js/dist/error.d.ts           |   30 +
 node_modules/tuf-js/dist/error.js             |   62 +
 node_modules/tuf-js/dist/fetcher.d.ts         |   19 +
 node_modules/tuf-js/dist/fetcher.js           |   81 +
 node_modules/tuf-js/dist/index.d.ts           |    3 +
 node_modules/tuf-js/dist/index.js             |    9 +
 node_modules/tuf-js/dist/models/base.d.ts     |   30 +
 node_modules/tuf-js/dist/models/base.js       |   71 +
 .../tuf-js/dist/models/delegations.d.ts       |   32 +
 .../tuf-js/dist/models/delegations.js         |  115 ++
 node_modules/tuf-js/dist/models/file.d.ts     |   40 +
 node_modules/tuf-js/dist/models/file.js       |  183 +++
 node_modules/tuf-js/dist/models/index.d.ts    |    5 +
 node_modules/tuf-js/dist/models/index.js      |   13 +
 node_modules/tuf-js/dist/models/key.d.ts      |   21 +
 node_modules/tuf-js/dist/models/key.js        |  109 ++
 node_modules/tuf-js/dist/models/metadata.d.ts |   45 +
 node_modules/tuf-js/dist/models/metadata.js   |  139 ++
 node_modules/tuf-js/dist/models/role.d.ts     |  103 ++
 node_modules/tuf-js/dist/models/role.js       |  298 ++++
 node_modules/tuf-js/dist/models/root.d.ts     |   28 +
 node_modules/tuf-js/dist/models/root.js       |  107 ++
 .../tuf-js/dist/models/signature.d.ts         |   19 +
 node_modules/tuf-js/dist/models/signature.js  |   32 +
 node_modules/tuf-js/dist/models/snapshot.d.ts |   23 +
 node_modules/tuf-js/dist/models/snapshot.js   |   71 +
 node_modules/tuf-js/dist/models/targets.d.ts  |   19 +
 node_modules/tuf-js/dist/models/targets.js    |   89 ++
 .../tuf-js/dist/models/timestamp.d.ts         |   21 +
 node_modules/tuf-js/dist/models/timestamp.js  |   58 +
 node_modules/tuf-js/dist/store.d.ts           |   19 +
 node_modules/tuf-js/dist/store.js             |  209 +++
 node_modules/tuf-js/dist/updater.d.ts         |   33 +
 node_modules/tuf-js/dist/updater.js           |  306 ++++
 node_modules/tuf-js/dist/utils/config.d.ts    |   12 +
 node_modules/tuf-js/dist/utils/config.js      |   14 +
 node_modules/tuf-js/dist/utils/guard.d.ts     |    8 +
 node_modules/tuf-js/dist/utils/guard.js       |   39 +
 node_modules/tuf-js/dist/utils/index.d.ts     |    5 +
 node_modules/tuf-js/dist/utils/index.js       |   31 +
 node_modules/tuf-js/dist/utils/json.d.ts      |    2 +
 node_modules/tuf-js/dist/utils/json.js        |   62 +
 node_modules/tuf-js/dist/utils/key.d.ts       |    9 +
 node_modules/tuf-js/dist/utils/key.js         |  143 ++
 node_modules/tuf-js/dist/utils/oid.d.ts       |    2 +
 node_modules/tuf-js/dist/utils/oid.js         |   27 +
 node_modules/tuf-js/dist/utils/signer.d.ts    |    3 +
 node_modules/tuf-js/dist/utils/signer.js      |   13 +
 node_modules/tuf-js/dist/utils/tmpfile.d.ts   |    3 +
 node_modules/tuf-js/dist/utils/tmpfile.js     |   25 +
 node_modules/tuf-js/dist/utils/types.d.ts     |   10 +
 node_modules/tuf-js/dist/utils/types.js       |   10 +
 node_modules/tuf-js/package.json              |   58 +
 package-lock.json                             |   29 +
 .../test/lib/commands/config.js.test.cjs      |    2 +
 tap-snapshots/test/lib/docs.js.test.cjs       |   12 +-
 workspaces/libnpmpublish/lib/provenance.js    |   70 +
 workspaces/libnpmpublish/lib/publish.js       |   45 +-
 workspaces/libnpmpublish/package.json         |    2 +
 .../test/fixtures/bad-bundle.json             |    1 +
 .../fixtures/bad-dsse-payload-bundle.json     |   51 +
 .../digest-mismatch-provenance-bundle.json    |   51 +
 .../fixtures/invalid-signature-bundle.json    |   51 +
 .../multi-subject-provenance-bundle.json      |   51 +
 .../no-provenance-envelope-bundle.json        |   41 +
 .../no-provenance-subject-bundle.json         |   51 +
 .../test/fixtures/valid-bundle.json           |   50 +
 workspaces/libnpmpublish/test/publish.js      |  210 +++
 216 files changed, 13907 insertions(+), 4 deletions(-)
 create mode 100644 node_modules/sigstore/LICENSE
 create mode 100755 node_modules/sigstore/bin/sigstore.js
 create mode 100644 node_modules/sigstore/dist/ca/format.d.ts
 create mode 100644 node_modules/sigstore/dist/ca/format.js
 create mode 100644 node_modules/sigstore/dist/ca/index.d.ts
 create mode 100644 node_modules/sigstore/dist/ca/index.js
 create mode 100644 node_modules/sigstore/dist/ca/verify/chain.d.ts
 create mode 100644 node_modules/sigstore/dist/ca/verify/chain.js
 create mode 100644 node_modules/sigstore/dist/ca/verify/index.d.ts
 create mode 100644 node_modules/sigstore/dist/ca/verify/index.js
 create mode 100644 node_modules/sigstore/dist/ca/verify/sct.d.ts
 create mode 100644 node_modules/sigstore/dist/ca/verify/sct.js
 create mode 100644 node_modules/sigstore/dist/ca/verify/signer.d.ts
 create mode 100644 node_modules/sigstore/dist/ca/verify/signer.js
 create mode 100644 node_modules/sigstore/dist/cli/index.d.ts
 create mode 100644 node_modules/sigstore/dist/cli/index.js
 create mode 100644 node_modules/sigstore/dist/client/error.d.ts
 create mode 100644 node_modules/sigstore/dist/client/error.js
 create mode 100644 node_modules/sigstore/dist/client/fulcio.d.ts
 create mode 100644 node_modules/sigstore/dist/client/fulcio.js
 create mode 100644 node_modules/sigstore/dist/client/index.d.ts
 create mode 100644 node_modules/sigstore/dist/client/index.js
 create mode 100644 node_modules/sigstore/dist/client/rekor.d.ts
 create mode 100644 node_modules/sigstore/dist/client/rekor.js
 create mode 100644 node_modules/sigstore/dist/error.d.ts
 create mode 100644 node_modules/sigstore/dist/error.js
 create mode 100644 node_modules/sigstore/dist/identity/ci.d.ts
 create mode 100644 node_modules/sigstore/dist/identity/ci.js
 create mode 100644 node_modules/sigstore/dist/identity/index.d.ts
 create mode 100644 node_modules/sigstore/dist/identity/index.js
 create mode 100644 node_modules/sigstore/dist/identity/issuer.d.ts
 create mode 100644 node_modules/sigstore/dist/identity/issuer.js
 create mode 100644 node_modules/sigstore/dist/identity/oauth.d.ts
 create mode 100644 node_modules/sigstore/dist/identity/oauth.js
 create mode 100644 node_modules/sigstore/dist/identity/provider.d.ts
 create mode 100644 node_modules/sigstore/dist/identity/provider.js
 create mode 100644 node_modules/sigstore/dist/index.d.ts
 create mode 100644 node_modules/sigstore/dist/index.js
 create mode 100644 node_modules/sigstore/dist/merkle/digest.d.ts
 create mode 100644 node_modules/sigstore/dist/merkle/digest.js
 create mode 100644 node_modules/sigstore/dist/merkle/index.d.ts
 create mode 100644 node_modules/sigstore/dist/merkle/index.js
 create mode 100644 node_modules/sigstore/dist/merkle/verify.d.ts
 create mode 100644 node_modules/sigstore/dist/merkle/verify.js
 create mode 100644 node_modules/sigstore/dist/sign.d.ts
 create mode 100644 node_modules/sigstore/dist/sign.js
 create mode 100644 node_modules/sigstore/dist/sigstore-utils.d.ts
 create mode 100644 node_modules/sigstore/dist/sigstore-utils.js
 create mode 100644 node_modules/sigstore/dist/sigstore.d.ts
 create mode 100644 node_modules/sigstore/dist/sigstore.js
 create mode 100644 node_modules/sigstore/dist/tlog/format.d.ts
 create mode 100644 node_modules/sigstore/dist/tlog/format.js
 create mode 100644 node_modules/sigstore/dist/tlog/index.d.ts
 create mode 100644 node_modules/sigstore/dist/tlog/index.js
 create mode 100644 node_modules/sigstore/dist/tlog/types/__generated__/hashedrekord.d.ts
 create mode 100644 node_modules/sigstore/dist/tlog/types/__generated__/hashedrekord.js
 create mode 100644 node_modules/sigstore/dist/tlog/types/__generated__/intoto.d.ts
 create mode 100644 node_modules/sigstore/dist/tlog/types/__generated__/intoto.js
 create mode 100644 node_modules/sigstore/dist/tlog/types/index.d.ts
 create mode 100644 node_modules/sigstore/dist/tlog/types/index.js
 create mode 100644 node_modules/sigstore/dist/tlog/verify/body.d.ts
 create mode 100644 node_modules/sigstore/dist/tlog/verify/body.js
 create mode 100644 node_modules/sigstore/dist/tlog/verify/index.d.ts
 create mode 100644 node_modules/sigstore/dist/tlog/verify/index.js
 create mode 100644 node_modules/sigstore/dist/tlog/verify/set.d.ts
 create mode 100644 node_modules/sigstore/dist/tlog/verify/set.js
 create mode 100644 node_modules/sigstore/dist/tuf/index.d.ts
 create mode 100644 node_modules/sigstore/dist/tuf/index.js
 create mode 100644 node_modules/sigstore/dist/tuf/trustroot.d.ts
 create mode 100644 node_modules/sigstore/dist/tuf/trustroot.js
 create mode 100644 node_modules/sigstore/dist/types/signature.d.ts
 create mode 100644 node_modules/sigstore/dist/types/signature.js
 create mode 100644 node_modules/sigstore/dist/types/sigstore/__generated__/envelope.d.ts
 create mode 100644 node_modules/sigstore/dist/types/sigstore/__generated__/envelope.js
 create mode 100644 node_modules/sigstore/dist/types/sigstore/__generated__/google/api/field_behavior.d.ts
 create mode 100644 node_modules/sigstore/dist/types/sigstore/__generated__/google/api/field_behavior.js
 create mode 100644 node_modules/sigstore/dist/types/sigstore/__generated__/google/protobuf/descriptor.d.ts
 create mode 100644 node_modules/sigstore/dist/types/sigstore/__generated__/google/protobuf/descriptor.js
 create mode 100644 node_modules/sigstore/dist/types/sigstore/__generated__/google/protobuf/timestamp.d.ts
 create mode 100644 node_modules/sigstore/dist/types/sigstore/__generated__/google/protobuf/timestamp.js
 create mode 100644 node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_bundle.d.ts
 create mode 100644 node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_bundle.js
 create mode 100644 node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_common.d.ts
 create mode 100644 node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_common.js
 create mode 100644 node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_rekor.d.ts
 create mode 100644 node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_rekor.js
 create mode 100644 node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_trustroot.d.ts
 create mode 100644 node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_trustroot.js
 create mode 100644 node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_verification.d.ts
 create mode 100644 node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_verification.js
 create mode 100644 node_modules/sigstore/dist/types/sigstore/index.d.ts
 create mode 100644 node_modules/sigstore/dist/types/sigstore/index.js
 create mode 100644 node_modules/sigstore/dist/types/sigstore/serialized.d.ts
 create mode 100644 node_modules/sigstore/dist/types/sigstore/serialized.js
 create mode 100644 node_modules/sigstore/dist/types/sigstore/validate.d.ts
 create mode 100644 node_modules/sigstore/dist/types/sigstore/validate.js
 create mode 100644 node_modules/sigstore/dist/types/utility.d.ts
 create mode 100644 node_modules/sigstore/dist/types/utility.js
 create mode 100644 node_modules/sigstore/dist/util/crypto.d.ts
 create mode 100644 node_modules/sigstore/dist/util/crypto.js
 create mode 100644 node_modules/sigstore/dist/util/dsse.d.ts
 create mode 100644 node_modules/sigstore/dist/util/dsse.js
 create mode 100644 node_modules/sigstore/dist/util/encoding.d.ts
 create mode 100644 node_modules/sigstore/dist/util/encoding.js
 create mode 100644 node_modules/sigstore/dist/util/index.d.ts
 create mode 100644 node_modules/sigstore/dist/util/index.js
 create mode 100644 node_modules/sigstore/dist/util/json.d.ts
 create mode 100644 node_modules/sigstore/dist/util/json.js
 create mode 100644 node_modules/sigstore/dist/util/oidc.d.ts
 create mode 100644 node_modules/sigstore/dist/util/oidc.js
 create mode 100644 node_modules/sigstore/dist/util/pem.d.ts
 create mode 100644 node_modules/sigstore/dist/util/pem.js
 create mode 100644 node_modules/sigstore/dist/util/promise.d.ts
 create mode 100644 node_modules/sigstore/dist/util/promise.js
 create mode 100644 node_modules/sigstore/dist/util/stream.d.ts
 create mode 100644 node_modules/sigstore/dist/util/stream.js
 create mode 100644 node_modules/sigstore/dist/util/ua.d.ts
 create mode 100644 node_modules/sigstore/dist/util/ua.js
 create mode 100644 node_modules/sigstore/dist/verify.d.ts
 create mode 100644 node_modules/sigstore/dist/verify.js
 create mode 100644 node_modules/sigstore/dist/x509/asn1/dump.d.ts
 create mode 100644 node_modules/sigstore/dist/x509/asn1/dump.js
 create mode 100644 node_modules/sigstore/dist/x509/asn1/error.d.ts
 create mode 100644 node_modules/sigstore/dist/x509/asn1/error.js
 create mode 100644 node_modules/sigstore/dist/x509/asn1/length.d.ts
 create mode 100644 node_modules/sigstore/dist/x509/asn1/length.js
 create mode 100644 node_modules/sigstore/dist/x509/asn1/obj.d.ts
 create mode 100644 node_modules/sigstore/dist/x509/asn1/obj.js
 create mode 100644 node_modules/sigstore/dist/x509/asn1/parse.d.ts
 create mode 100644 node_modules/sigstore/dist/x509/asn1/parse.js
 create mode 100644 node_modules/sigstore/dist/x509/asn1/tag.d.ts
 create mode 100644 node_modules/sigstore/dist/x509/asn1/tag.js
 create mode 100644 node_modules/sigstore/dist/x509/cert.d.ts
 create mode 100644 node_modules/sigstore/dist/x509/cert.js
 create mode 100644 node_modules/sigstore/dist/x509/ext.d.ts
 create mode 100644 node_modules/sigstore/dist/x509/ext.js
 create mode 100644 node_modules/sigstore/dist/x509/sct.d.ts
 create mode 100644 node_modules/sigstore/dist/x509/sct.js
 create mode 100644 node_modules/sigstore/dist/x509/verify.d.ts
 create mode 100644 node_modules/sigstore/dist/x509/verify.js
 create mode 100644 node_modules/sigstore/package.json
 create mode 100644 node_modules/sigstore/store/map.json
 create mode 100644 node_modules/sigstore/store/public-good-instance-root.json
 create mode 100644 node_modules/tuf-js/LICENSE
 create mode 100644 node_modules/tuf-js/dist/error.d.ts
 create mode 100644 node_modules/tuf-js/dist/error.js
 create mode 100644 node_modules/tuf-js/dist/fetcher.d.ts
 create mode 100644 node_modules/tuf-js/dist/fetcher.js
 create mode 100644 node_modules/tuf-js/dist/index.d.ts
 create mode 100644 node_modules/tuf-js/dist/index.js
 create mode 100644 node_modules/tuf-js/dist/models/base.d.ts
 create mode 100644 node_modules/tuf-js/dist/models/base.js
 create mode 100644 node_modules/tuf-js/dist/models/delegations.d.ts
 create mode 100644 node_modules/tuf-js/dist/models/delegations.js
 create mode 100644 node_modules/tuf-js/dist/models/file.d.ts
 create mode 100644 node_modules/tuf-js/dist/models/file.js
 create mode 100644 node_modules/tuf-js/dist/models/index.d.ts
 create mode 100644 node_modules/tuf-js/dist/models/index.js
 create mode 100644 node_modules/tuf-js/dist/models/key.d.ts
 create mode 100644 node_modules/tuf-js/dist/models/key.js
 create mode 100644 node_modules/tuf-js/dist/models/metadata.d.ts
 create mode 100644 node_modules/tuf-js/dist/models/metadata.js
 create mode 100644 node_modules/tuf-js/dist/models/role.d.ts
 create mode 100644 node_modules/tuf-js/dist/models/role.js
 create mode 100644 node_modules/tuf-js/dist/models/root.d.ts
 create mode 100644 node_modules/tuf-js/dist/models/root.js
 create mode 100644 node_modules/tuf-js/dist/models/signature.d.ts
 create mode 100644 node_modules/tuf-js/dist/models/signature.js
 create mode 100644 node_modules/tuf-js/dist/models/snapshot.d.ts
 create mode 100644 node_modules/tuf-js/dist/models/snapshot.js
 create mode 100644 node_modules/tuf-js/dist/models/targets.d.ts
 create mode 100644 node_modules/tuf-js/dist/models/targets.js
 create mode 100644 node_modules/tuf-js/dist/models/timestamp.d.ts
 create mode 100644 node_modules/tuf-js/dist/models/timestamp.js
 create mode 100644 node_modules/tuf-js/dist/store.d.ts
 create mode 100644 node_modules/tuf-js/dist/store.js
 create mode 100644 node_modules/tuf-js/dist/updater.d.ts
 create mode 100644 node_modules/tuf-js/dist/updater.js
 create mode 100644 node_modules/tuf-js/dist/utils/config.d.ts
 create mode 100644 node_modules/tuf-js/dist/utils/config.js
 create mode 100644 node_modules/tuf-js/dist/utils/guard.d.ts
 create mode 100644 node_modules/tuf-js/dist/utils/guard.js
 create mode 100644 node_modules/tuf-js/dist/utils/index.d.ts
 create mode 100644 node_modules/tuf-js/dist/utils/index.js
 create mode 100644 node_modules/tuf-js/dist/utils/json.d.ts
 create mode 100644 node_modules/tuf-js/dist/utils/json.js
 create mode 100644 node_modules/tuf-js/dist/utils/key.d.ts
 create mode 100644 node_modules/tuf-js/dist/utils/key.js
 create mode 100644 node_modules/tuf-js/dist/utils/oid.d.ts
 create mode 100644 node_modules/tuf-js/dist/utils/oid.js
 create mode 100644 node_modules/tuf-js/dist/utils/signer.d.ts
 create mode 100644 node_modules/tuf-js/dist/utils/signer.js
 create mode 100644 node_modules/tuf-js/dist/utils/tmpfile.d.ts
 create mode 100644 node_modules/tuf-js/dist/utils/tmpfile.js
 create mode 100644 node_modules/tuf-js/dist/utils/types.d.ts
 create mode 100644 node_modules/tuf-js/dist/utils/types.js
 create mode 100644 node_modules/tuf-js/package.json
 create mode 100644 workspaces/libnpmpublish/lib/provenance.js
 create mode 100644 workspaces/libnpmpublish/test/fixtures/bad-bundle.json
 create mode 100644 workspaces/libnpmpublish/test/fixtures/bad-dsse-payload-bundle.json
 create mode 100644 workspaces/libnpmpublish/test/fixtures/digest-mismatch-provenance-bundle.json
 create mode 100644 workspaces/libnpmpublish/test/fixtures/invalid-signature-bundle.json
 create mode 100644 workspaces/libnpmpublish/test/fixtures/multi-subject-provenance-bundle.json
 create mode 100644 workspaces/libnpmpublish/test/fixtures/no-provenance-envelope-bundle.json
 create mode 100644 workspaces/libnpmpublish/test/fixtures/no-provenance-subject-bundle.json
 create mode 100644 workspaces/libnpmpublish/test/fixtures/valid-bundle.json

diff --git a/DEPENDENCIES.md b/DEPENDENCIES.md
index a79ede9f6dd5d..f94175fbbb135 100644
--- a/DEPENDENCIES.md
+++ b/DEPENDENCIES.md
@@ -398,6 +398,7 @@ graph LR;
   libnpmpack-->pacote;
   libnpmpack-->spawk;
   libnpmpack-->tap;
+  libnpmpublish-->ci-info;
   libnpmpublish-->lodash.clonedeep;
   libnpmpublish-->nock;
   libnpmpublish-->normalize-package-data;
@@ -407,6 +408,7 @@ graph LR;
   libnpmpublish-->npmcli-mock-registry["@npmcli/mock-registry"];
   libnpmpublish-->npmcli-template-oss["@npmcli/template-oss"];
   libnpmpublish-->semver;
+  libnpmpublish-->sigstore;
   libnpmpublish-->ssri;
   libnpmpublish-->tap;
   libnpmsearch-->nock;
@@ -734,6 +736,8 @@ graph LR;
   readable-stream-->util-deprecate;
   rimraf-->glob;
   semver-->lru-cache;
+  sigstore-->make-fetch-happen;
+  sigstore-->tuf-js;
   socks-->ip;
   socks-->smart-buffer;
   socks-proxy-agent-->agent-base;
@@ -756,6 +760,8 @@ graph LR;
   tar-->minizlib;
   tar-->mkdirp;
   tar-->yallist;
+  tuf-js-->make-fetch-happen;
+  tuf-js-->minimatch;
   unique-filename-->unique-slug;
   unique-slug-->imurmurhash;
   validate-npm-package-license-->spdx-correct;
diff --git a/lib/commands/publish.js b/lib/commands/publish.js
index 76faea9457f74..8befbc5ca34ce 100644
--- a/lib/commands/publish.js
+++ b/lib/commands/publish.js
@@ -35,6 +35,7 @@ class Publish extends BaseCommand {
     'workspace',
     'workspaces',
     'include-workspace-root',
+    'provenance',
   ]
 
   static usage = ['<package-spec>']
diff --git a/lib/utils/config/definitions.js b/lib/utils/config/definitions.js
index 77a88d79ae6e6..4b9eb1f64cbbd 100644
--- a/lib/utils/config/definitions.js
+++ b/lib/utils/config/definitions.js
@@ -1620,6 +1620,15 @@ define('progress', {
   },
 })
 
+define('provenance', {
+  default: false,
+  type: Boolean,
+  description: `
+    Indicates that a provenance statement should be generated.
+  `,
+  flatten,
+})
+
 define('proxy', {
   default: null,
   type: [null, false, url], // allow proxy to be disabled explicitly
diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index 561ce139d844d..73b8929986036 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -241,6 +241,7 @@
 !/semver/node_modules/lru-cache
 !/set-blocking
 !/signal-exit
+!/sigstore
 !/smart-buffer
 !/socks-proxy-agent
 !/socks
@@ -263,6 +264,7 @@
 !/text-table
 !/tiny-relative-date
 !/treeverse
+!/tuf-js
 !/unique-filename
 !/unique-slug
 !/util-deprecate
diff --git a/node_modules/sigstore/LICENSE b/node_modules/sigstore/LICENSE
new file mode 100644
index 0000000000000..d645695673349
--- /dev/null
+++ b/node_modules/sigstore/LICENSE
@@ -0,0 +1,202 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/node_modules/sigstore/bin/sigstore.js b/node_modules/sigstore/bin/sigstore.js
new file mode 100755
index 0000000000000..a07b7bdc1af95
--- /dev/null
+++ b/node_modules/sigstore/bin/sigstore.js
@@ -0,0 +1,17 @@
+#!/usr/bin/env node
+/*
+Copyright 2022 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+require('../dist/cli').processArgv();
diff --git a/node_modules/sigstore/dist/ca/format.d.ts b/node_modules/sigstore/dist/ca/format.d.ts
new file mode 100644
index 0000000000000..185b4c260afb4
--- /dev/null
+++ b/node_modules/sigstore/dist/ca/format.d.ts
@@ -0,0 +1,5 @@
+/// <reference types="node" />
+/// <reference types="node" />
+import { KeyObject } from 'crypto';
+import { CertificateRequest } from '../client/fulcio';
+export declare function toCertificateRequest(publicKey: KeyObject, challenge: Buffer): CertificateRequest;
diff --git a/node_modules/sigstore/dist/ca/format.js b/node_modules/sigstore/dist/ca/format.js
new file mode 100644
index 0000000000000..f168cbe315479
--- /dev/null
+++ b/node_modules/sigstore/dist/ca/format.js
@@ -0,0 +1,14 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.toCertificateRequest = void 0;
+function toCertificateRequest(publicKey, challenge) {
+    return {
+        publicKey: {
+            content: publicKey
+                .export({ type: 'spki', format: 'der' })
+                .toString('base64'),
+        },
+        signedEmailAddress: challenge.toString('base64'),
+    };
+}
+exports.toCertificateRequest = toCertificateRequest;
diff --git a/node_modules/sigstore/dist/ca/index.d.ts b/node_modules/sigstore/dist/ca/index.d.ts
new file mode 100644
index 0000000000000..0ee0bf4ae67b2
--- /dev/null
+++ b/node_modules/sigstore/dist/ca/index.d.ts
@@ -0,0 +1,14 @@
+/// <reference types="node" />
+/// <reference types="node" />
+import { KeyObject } from 'crypto';
+export interface CA {
+    createSigningCertificate: (identityToken: string, publicKey: KeyObject, challenge: Buffer) => Promise<string[]>;
+}
+export interface CAClientOptions {
+    fulcioBaseURL: string;
+}
+export declare class CAClient implements CA {
+    private fulcio;
+    constructor(options: CAClientOptions);
+    createSigningCertificate(identityToken: string, publicKey: KeyObject, challenge: Buffer): Promise<string[]>;
+}
diff --git a/node_modules/sigstore/dist/ca/index.js b/node_modules/sigstore/dist/ca/index.js
new file mode 100644
index 0000000000000..3b90f864f0891
--- /dev/null
+++ b/node_modules/sigstore/dist/ca/index.js
@@ -0,0 +1,23 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.CAClient = void 0;
+const client_1 = require("../client");
+const error_1 = require("../error");
+const util_1 = require("../util");
+const format_1 = require("./format");
+class CAClient {
+    constructor(options) {
+        this.fulcio = new client_1.Fulcio({ baseURL: options.fulcioBaseURL });
+    }
+    async createSigningCertificate(identityToken, publicKey, challenge) {
+        const request = (0, format_1.toCertificateRequest)(publicKey, challenge);
+        try {
+            const certificate = await this.fulcio.createSigningCertificate(identityToken, request);
+            return util_1.pem.split(certificate);
+        }
+        catch (err) {
+            throw new error_1.InternalError('error creating signing certificate', err);
+        }
+    }
+}
+exports.CAClient = CAClient;
diff --git a/node_modules/sigstore/dist/ca/verify/chain.d.ts b/node_modules/sigstore/dist/ca/verify/chain.d.ts
new file mode 100644
index 0000000000000..7ccc0dcf8d15c
--- /dev/null
+++ b/node_modules/sigstore/dist/ca/verify/chain.d.ts
@@ -0,0 +1,3 @@
+import * as sigstore from '../../types/sigstore';
+import { x509Certificate } from '../../x509/cert';
+export declare function verifyChain(bundleCerts: sigstore.X509Certificate[], certificateAuthorities: sigstore.CertificateAuthority[]): x509Certificate[];
diff --git a/node_modules/sigstore/dist/ca/verify/chain.js b/node_modules/sigstore/dist/ca/verify/chain.js
new file mode 100644
index 0000000000000..0f6f714695728
--- /dev/null
+++ b/node_modules/sigstore/dist/ca/verify/chain.js
@@ -0,0 +1,64 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.verifyChain = void 0;
+/*
+Copyright 2022 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const error_1 = require("../../error");
+const cert_1 = require("../../x509/cert");
+const verify_1 = require("../../x509/verify");
+function verifyChain(bundleCerts, certificateAuthorities) {
+    const certs = parseCerts(bundleCerts);
+    const signingCert = certs[0];
+    // Filter the list of certificate authorities to those which are valid for the
+    // signing certificate's notBefore date.
+    const validCAs = filterCertificateAuthorities(certificateAuthorities, signingCert.notBefore);
+    if (validCAs.length === 0) {
+        throw new error_1.VerificationError('No valid certificate authorities');
+    }
+    let trustedChain = [];
+    // Loop through all valid CAs and attempt to verify the certificate chain
+    const verified = validCAs.find((ca) => {
+        const trustedCerts = parseCerts(ca.certChain?.certificates || []);
+        try {
+            trustedChain = (0, verify_1.verifyCertificateChain)({
+                trustedCerts,
+                certs,
+                validAt: signingCert.notBefore,
+            });
+            return true;
+        }
+        catch (e) {
+            return false;
+        }
+    });
+    if (!verified) {
+        throw new error_1.VerificationError('No valid certificate chain');
+    }
+    return trustedChain;
+}
+exports.verifyChain = verifyChain;
+// Filter the list of certificate authorities to those which are valid for the
+// given date.
+function filterCertificateAuthorities(certificateAuthorities, validAt) {
+    return certificateAuthorities.filter((ca) => ca.validFor &&
+        ca.validFor.start &&
+        ca.validFor.start <= validAt &&
+        (!ca.validFor.end || validAt <= ca.validFor.end));
+}
+// Parse the raw bytes of a certificate into an x509Certificate object.
+function parseCerts(certs) {
+    return certs.map((cert) => cert_1.x509Certificate.parse(cert.rawBytes));
+}
diff --git a/node_modules/sigstore/dist/ca/verify/index.d.ts b/node_modules/sigstore/dist/ca/verify/index.d.ts
new file mode 100644
index 0000000000000..ddf65ff6dfffd
--- /dev/null
+++ b/node_modules/sigstore/dist/ca/verify/index.d.ts
@@ -0,0 +1,2 @@
+import * as sigstore from '../../types/sigstore';
+export declare function verifySigningCertificate(bundle: sigstore.BundleWithCertificateChain, trustedRoot: sigstore.TrustedRoot, options: sigstore.CAArtifactVerificationOptions): void;
diff --git a/node_modules/sigstore/dist/ca/verify/index.js b/node_modules/sigstore/dist/ca/verify/index.js
new file mode 100644
index 0000000000000..9c42f3094338f
--- /dev/null
+++ b/node_modules/sigstore/dist/ca/verify/index.js
@@ -0,0 +1,21 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.verifySigningCertificate = void 0;
+const chain_1 = require("./chain");
+const sct_1 = require("./sct");
+const signer_1 = require("./signer");
+function verifySigningCertificate(bundle, trustedRoot, options) {
+    // Check that a trusted certificate chain can be found for the signing
+    // certificate in the bundle
+    const trustedChain = (0, chain_1.verifyChain)(bundle.verificationMaterial.content.x509CertificateChain.certificates, trustedRoot.certificateAuthorities);
+    // Unless disabled, verify the SCTs in the signing certificate
+    if (options.ctlogOptions.disable === false) {
+        (0, sct_1.verifySCTs)(trustedChain, trustedRoot.ctlogs, options.ctlogOptions);
+    }
+    // Verify the signing certificate against the provided identities
+    // if provided
+    if (options.signers) {
+        (0, signer_1.verifySignerIdentity)(trustedChain[0], options.signers.certificateIdentities);
+    }
+}
+exports.verifySigningCertificate = verifySigningCertificate;
diff --git a/node_modules/sigstore/dist/ca/verify/sct.d.ts b/node_modules/sigstore/dist/ca/verify/sct.d.ts
new file mode 100644
index 0000000000000..29391a74cb65e
--- /dev/null
+++ b/node_modules/sigstore/dist/ca/verify/sct.d.ts
@@ -0,0 +1,3 @@
+import * as sigstore from '../../types/sigstore';
+import { x509Certificate } from '../../x509/cert';
+export declare function verifySCTs(certificateChain: x509Certificate[], ctLogs: sigstore.TransparencyLogInstance[], options: sigstore.ArtifactVerificationOptions_CtlogOptions): void;
diff --git a/node_modules/sigstore/dist/ca/verify/sct.js b/node_modules/sigstore/dist/ca/verify/sct.js
new file mode 100644
index 0000000000000..771c6dd4c0a7f
--- /dev/null
+++ b/node_modules/sigstore/dist/ca/verify/sct.js
@@ -0,0 +1,30 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.verifySCTs = void 0;
+/*
+Copyright 2022 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const error_1 = require("../../error");
+function verifySCTs(certificateChain, ctLogs, options) {
+    const signingCert = certificateChain[0];
+    const issuerCert = certificateChain[1];
+    const sctResults = signingCert.verifySCTs(issuerCert, ctLogs);
+    // Count the number of verified SCTs which were found
+    const verifiedSCTCount = sctResults.filter((sct) => sct.verified).length;
+    if (verifiedSCTCount < options.threshold) {
+        throw new error_1.VerificationError(`Not enough SCTs verified (found ${verifiedSCTCount}, need ${options.threshold})`);
+    }
+}
+exports.verifySCTs = verifySCTs;
diff --git a/node_modules/sigstore/dist/ca/verify/signer.d.ts b/node_modules/sigstore/dist/ca/verify/signer.d.ts
new file mode 100644
index 0000000000000..7241b90f6ac5c
--- /dev/null
+++ b/node_modules/sigstore/dist/ca/verify/signer.d.ts
@@ -0,0 +1,3 @@
+import * as sigstore from '../../types/sigstore';
+import { x509Certificate } from '../../x509/cert';
+export declare function verifySignerIdentity(signingCert: x509Certificate, identities: sigstore.CertificateIdentities): void;
diff --git a/node_modules/sigstore/dist/ca/verify/signer.js b/node_modules/sigstore/dist/ca/verify/signer.js
new file mode 100644
index 0000000000000..2c49f0bcd683f
--- /dev/null
+++ b/node_modules/sigstore/dist/ca/verify/signer.js
@@ -0,0 +1,131 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.verifySignerIdentity = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const error_1 = require("../../error");
+const sigstore = __importStar(require("../../types/sigstore"));
+// https://github.com/sigstore/fulcio/blob/main/docs/oid-info.md#1361415726411--issuer
+const OID_FULCIO_ISSUER = '1.3.6.1.4.1.57264.1.1';
+// https://github.com/sigstore/fulcio/blob/main/docs/oid-info.md#1361415726417--othername-san
+const OID_FULCIO_USERNAME_SUBJECT = '1.3.6.1.4.1.57264.1.7';
+// Verifies the identity embedded in a Fulcio-issued signing certificate against
+// the list of trusted identities. Returns without error if at least one of the
+// identities matches the signing certificate; otherwise, throws a
+// VerificationError.
+function verifySignerIdentity(signingCert, identities) {
+    // Check that the signing certificate was issued to at least one of the
+    // specified identities
+    const signerVerified = identities.identities.some((identity) => verifyIdentity(signingCert, identity));
+    if (!signerVerified) {
+        throw new error_1.PolicyError('Certificate issued to untrusted signer');
+    }
+}
+exports.verifySignerIdentity = verifySignerIdentity;
+// Checks that the specified certificate was issued to the specified identity.
+// The certificate must match the issuer, subject alternative name, and an
+// optional list of certificate extensions. Returns true if the certificate was
+// issued to the identity; otherwise, returns false.
+function verifyIdentity(cert, identity) {
+    return (verifyIssuer(cert, identity.issuer) &&
+        verifySAN(cert, identity.san) &&
+        verifyOIDs(cert, identity.oids));
+}
+// Checks the Fulcio issuer extension against the expected issuer. Returns true
+// if the issuer matches; otherwise, returns false.
+function verifyIssuer(cert, issuer) {
+    const issuerExtension = cert.extension(OID_FULCIO_ISSUER);
+    return issuerExtension?.value.toString('ascii') === issuer;
+}
+// Checks the certificate against the expected subject alternative name. Returns
+// true if the SAN matches; otherwise, returns false.
+function verifySAN(cert, expectedSAN) {
+    // Fail if the SAN is not specified or is not a supported type
+    if (expectedSAN === undefined ||
+        expectedSAN.identity === undefined ||
+        expectedSAN.type ===
+            sigstore.SubjectAlternativeNameType
+                .SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED) {
+        return false;
+    }
+    const sanExtension = cert.extSubjectAltName;
+    // Fail if the certificate does not have a SAN extension
+    if (!sanExtension) {
+        return false;
+    }
+    let sanValue;
+    switch (expectedSAN.type) {
+        case sigstore.SubjectAlternativeNameType.EMAIL:
+            sanValue = sanExtension.rfc822Name;
+            break;
+        case sigstore.SubjectAlternativeNameType.URI:
+            sanValue = sanExtension.uri;
+            break;
+        case sigstore.SubjectAlternativeNameType.OTHER_NAME:
+            sanValue = sanExtension.otherName(OID_FULCIO_USERNAME_SUBJECT);
+            break;
+    }
+    // Missing SAN value is an automatic failure
+    if (sanValue === undefined) {
+        return false;
+    }
+    let match;
+    switch (expectedSAN.identity.$case) {
+        case 'value':
+            match = expectedSAN.identity.value;
+            break;
+        case 'regexp':
+            // TODO support regex
+            break;
+    }
+    return sanValue === match;
+}
+// Checks that the certificate contains the specified extensions. Returns true
+// if all extensions are present and match the expected values; otherwise,
+// returns false.
+function verifyOIDs(cert, oids) {
+    return oids.every((expectedExtension) => {
+        if (!expectedExtension.oid) {
+            return false;
+        }
+        const oid = expectedExtension.oid.id.join('.');
+        const extension = cert.extension(oid);
+        return extension?.value.equals(expectedExtension.value);
+    });
+}
diff --git a/node_modules/sigstore/dist/cli/index.d.ts b/node_modules/sigstore/dist/cli/index.d.ts
new file mode 100644
index 0000000000000..395f0a5a69d30
--- /dev/null
+++ b/node_modules/sigstore/dist/cli/index.d.ts
@@ -0,0 +1 @@
+export declare function processArgv(): Promise<void>;
diff --git a/node_modules/sigstore/dist/cli/index.js b/node_modules/sigstore/dist/cli/index.js
new file mode 100644
index 0000000000000..0937d3603a2a9
--- /dev/null
+++ b/node_modules/sigstore/dist/cli/index.js
@@ -0,0 +1,113 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.processArgv = void 0;
+/*
+Copyright 2022 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const fs_1 = __importDefault(require("fs"));
+const index_1 = require("../index");
+const INTOTO_PAYLOAD_TYPE = 'application/vnd.in-toto+json';
+async function cli(args) {
+    switch (args[0]) {
+        case 'sign':
+            await sign(args[1]);
+            break;
+        case 'attest':
+            await attest(args[1], args[2]);
+            break;
+        case 'verify':
+            await verify(args[1], args[2]);
+            break;
+        case 'version':
+        case '-version':
+        case '--version':
+        case '-v':
+            // eslint-disable-next-line @typescript-eslint/no-var-requires
+            console.log(require('../../package.json').version);
+            break;
+        case 'help':
+        case '--help':
+        case '-h':
+        case '-?':
+            printUsage();
+            break;
+        default:
+            throw 'Unknown command';
+    }
+}
+function printUsage() {
+    console.log(`sigstore <command> <artifact>
+
+  Usage:
+
+  sigstore sign         sign an artifact
+  sigstore attest       sign an artifact using dsse (Dead Simple Signing Envelope)
+  sigstore verify       verify an artifact
+  sigstore version      print version information
+  sigstore help         print help information
+  `);
+}
+const signOptions = {
+    oidcClientID: 'sigstore',
+    oidcIssuer: 'https://oauth2.sigstore.dev/auth',
+    rekorURL: index_1.sigstore.DEFAULT_REKOR_URL,
+};
+async function sign(artifactPath) {
+    const buffer = fs_1.default.readFileSync(artifactPath);
+    const bundle = await index_1.sigstore.sign(buffer, signOptions);
+    const url = `${signOptions.rekorURL}/api/v1/log/entries`;
+    const logIndex = bundle.verificationMaterial?.tlogEntries[0].logIndex;
+    console.error(`Created entry at index ${logIndex}, available at`);
+    console.error(`${url}?logIndex=${logIndex}`);
+    console.log(JSON.stringify(bundle));
+}
+async function attest(artifactPath, payloadType = INTOTO_PAYLOAD_TYPE) {
+    const buffer = fs_1.default.readFileSync(artifactPath);
+    const bundle = await index_1.sigstore.attest(buffer, payloadType, signOptions);
+    console.log(JSON.stringify(bundle));
+}
+async function verify(bundlePath, artifactPath) {
+    let payload = undefined;
+    if (artifactPath) {
+        payload = fs_1.default.readFileSync(artifactPath);
+    }
+    const bundleFile = fs_1.default.readFileSync(bundlePath);
+    const bundle = JSON.parse(bundleFile.toString('utf-8'));
+    try {
+        await index_1.sigstore.verify(bundle, payload, {});
+        console.error('Verified OK');
+    }
+    catch (e) {
+        console.error('Verification failed');
+        if (e instanceof Error) {
+            console.error('Error: ' + e.message);
+        }
+        process.exit(1);
+    }
+}
+async function processArgv() {
+    try {
+        await cli(process.argv.slice(2));
+        process.exit(0);
+    }
+    catch (e) {
+        console.error(e);
+        process.exit(1);
+    }
+}
+exports.processArgv = processArgv;
diff --git a/node_modules/sigstore/dist/client/error.d.ts b/node_modules/sigstore/dist/client/error.d.ts
new file mode 100644
index 0000000000000..87a4bc5451a3d
--- /dev/null
+++ b/node_modules/sigstore/dist/client/error.d.ts
@@ -0,0 +1,10 @@
+import fetch from 'make-fetch-happen';
+type Response = Awaited<ReturnType<typeof fetch>>;
+export declare class HTTPError extends Error {
+    response: Response;
+    statusCode: number;
+    location?: string;
+    constructor(response: Response);
+}
+export declare const checkStatus: (response: Response) => Response;
+export {};
diff --git a/node_modules/sigstore/dist/client/error.js b/node_modules/sigstore/dist/client/error.js
new file mode 100644
index 0000000000000..d1e1c3df8a878
--- /dev/null
+++ b/node_modules/sigstore/dist/client/error.js
@@ -0,0 +1,21 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.checkStatus = exports.HTTPError = void 0;
+class HTTPError extends Error {
+    constructor(response) {
+        super(`HTTP Error: ${response.status} ${response.statusText}`);
+        this.response = response;
+        this.statusCode = response.status;
+        this.location = response.headers?.get('Location') || undefined;
+    }
+}
+exports.HTTPError = HTTPError;
+const checkStatus = (response) => {
+    if (response.ok) {
+        return response;
+    }
+    else {
+        throw new HTTPError(response);
+    }
+};
+exports.checkStatus = checkStatus;
diff --git a/node_modules/sigstore/dist/client/fulcio.d.ts b/node_modules/sigstore/dist/client/fulcio.d.ts
new file mode 100644
index 0000000000000..72ed51ae75d31
--- /dev/null
+++ b/node_modules/sigstore/dist/client/fulcio.d.ts
@@ -0,0 +1,18 @@
+export interface FulcioOptions {
+    baseURL: string;
+}
+export interface CertificateRequest {
+    publicKey: {
+        content: string;
+    };
+    signedEmailAddress: string;
+}
+/**
+ * Fulcio API client.
+ */
+export declare class Fulcio {
+    private fetch;
+    private baseUrl;
+    constructor(options: FulcioOptions);
+    createSigningCertificate(idToken: string, request: CertificateRequest): Promise<string>;
+}
diff --git a/node_modules/sigstore/dist/client/fulcio.js b/node_modules/sigstore/dist/client/fulcio.js
new file mode 100644
index 0000000000000..04041387f1a0b
--- /dev/null
+++ b/node_modules/sigstore/dist/client/fulcio.js
@@ -0,0 +1,53 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Fulcio = void 0;
+/*
+Copyright 2022 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const make_fetch_happen_1 = __importDefault(require("make-fetch-happen"));
+const util_1 = require("../util");
+const error_1 = require("./error");
+/**
+ * Fulcio API client.
+ */
+class Fulcio {
+    constructor(options) {
+        this.fetch = make_fetch_happen_1.default.defaults({
+            retry: { retries: 2 },
+            timeout: 5000,
+            headers: {
+                Accept: 'application/pem-certificate-chain',
+                'Content-Type': 'application/json',
+                'User-Agent': util_1.ua.getUserAgent(),
+            },
+        });
+        this.baseUrl = options.baseURL;
+    }
+    async createSigningCertificate(idToken, request) {
+        const url = `${this.baseUrl}/api/v1/signingCert`;
+        const response = await this.fetch(url, {
+            method: 'POST',
+            headers: { Authorization: `Bearer ${idToken}` },
+            body: JSON.stringify(request),
+        });
+        (0, error_1.checkStatus)(response);
+        const data = await response.text();
+        return data;
+    }
+}
+exports.Fulcio = Fulcio;
diff --git a/node_modules/sigstore/dist/client/index.d.ts b/node_modules/sigstore/dist/client/index.d.ts
new file mode 100644
index 0000000000000..2102381c0ffc1
--- /dev/null
+++ b/node_modules/sigstore/dist/client/index.d.ts
@@ -0,0 +1,2 @@
+export { Fulcio } from './fulcio';
+export { Rekor } from './rekor';
diff --git a/node_modules/sigstore/dist/client/index.js b/node_modules/sigstore/dist/client/index.js
new file mode 100644
index 0000000000000..c6d1d9ad36a54
--- /dev/null
+++ b/node_modules/sigstore/dist/client/index.js
@@ -0,0 +1,22 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Rekor = exports.Fulcio = void 0;
+/*
+Copyright 2022 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+var fulcio_1 = require("./fulcio");
+Object.defineProperty(exports, "Fulcio", { enumerable: true, get: function () { return fulcio_1.Fulcio; } });
+var rekor_1 = require("./rekor");
+Object.defineProperty(exports, "Rekor", { enumerable: true, get: function () { return rekor_1.Rekor; } });
diff --git a/node_modules/sigstore/dist/client/rekor.d.ts b/node_modules/sigstore/dist/client/rekor.d.ts
new file mode 100644
index 0000000000000..55a909f11a35d
--- /dev/null
+++ b/node_modules/sigstore/dist/client/rekor.d.ts
@@ -0,0 +1,45 @@
+import { Entry, EntryKind } from '../tlog';
+export interface RekorOptions {
+    baseURL: string;
+}
+export interface SearchIndex {
+    email?: string;
+    hash?: string;
+}
+export interface SearchLogQuery {
+    entries?: EntryKind[];
+    entryUUIDs?: string[];
+    logIndexes?: number[];
+}
+/**
+ * Rekor API client.
+ */
+export declare class Rekor {
+    private fetch;
+    private baseUrl;
+    constructor(options: RekorOptions);
+    /**
+     * Create a new entry in the Rekor log.
+     * @param propsedEntry {EntryKind} Data to create a new entry
+     * @returns {Promise<Entry>} The created entry
+     */
+    createEntry(propsedEntry: EntryKind): Promise<Entry>;
+    /**
+     * Get an entry from the Rekor log.
+     * @param uuid {string} The UUID of the entry to retrieve
+     * @returns {Promise<Entry>} The retrieved entry
+     */
+    getEntry(uuid: string): Promise<Entry>;
+    /**
+     * Search the Rekor log index for entries matching the given query.
+     * @param opts {SearchIndex} Options to search the Rekor log
+     * @returns {Promise<string[]>} UUIDs of matching entries
+     */
+    searchIndex(opts: SearchIndex): Promise<string[]>;
+    /**
+     * Search the Rekor logs for entries matching the given query.
+     * @param opts {SearchLogQuery} Query to search the Rekor log
+     * @returns {Promise<Entry[]>} List of matching entries
+     */
+    searchLog(opts: SearchLogQuery): Promise<Entry[]>;
+}
diff --git a/node_modules/sigstore/dist/client/rekor.js b/node_modules/sigstore/dist/client/rekor.js
new file mode 100644
index 0000000000000..6bb085c44cecd
--- /dev/null
+++ b/node_modules/sigstore/dist/client/rekor.js
@@ -0,0 +1,115 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Rekor = void 0;
+/*
+Copyright 2022 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const make_fetch_happen_1 = __importDefault(require("make-fetch-happen"));
+const util_1 = require("../util");
+const error_1 = require("./error");
+/**
+ * Rekor API client.
+ */
+class Rekor {
+    constructor(options) {
+        this.fetch = make_fetch_happen_1.default.defaults({
+            retry: { retries: 2 },
+            timeout: 5000,
+            headers: {
+                Accept: 'application/json',
+                'User-Agent': util_1.ua.getUserAgent(),
+            },
+        });
+        this.baseUrl = options.baseURL;
+    }
+    /**
+     * Create a new entry in the Rekor log.
+     * @param propsedEntry {EntryKind} Data to create a new entry
+     * @returns {Promise<Entry>} The created entry
+     */
+    async createEntry(propsedEntry) {
+        const url = `${this.baseUrl}/api/v1/log/entries`;
+        const response = await this.fetch(url, {
+            method: 'POST',
+            headers: { 'Content-Type': 'application/json' },
+            body: JSON.stringify(propsedEntry),
+        });
+        (0, error_1.checkStatus)(response);
+        const data = await response.json();
+        return entryFromResponse(data);
+    }
+    /**
+     * Get an entry from the Rekor log.
+     * @param uuid {string} The UUID of the entry to retrieve
+     * @returns {Promise<Entry>} The retrieved entry
+     */
+    async getEntry(uuid) {
+        const url = `${this.baseUrl}/api/v1/log/entries/${uuid}`;
+        const response = await this.fetch(url);
+        (0, error_1.checkStatus)(response);
+        const data = await response.json();
+        return entryFromResponse(data);
+    }
+    /**
+     * Search the Rekor log index for entries matching the given query.
+     * @param opts {SearchIndex} Options to search the Rekor log
+     * @returns {Promise<string[]>} UUIDs of matching entries
+     */
+    async searchIndex(opts) {
+        const url = `${this.baseUrl}/api/v1/index/retrieve`;
+        const response = await this.fetch(url, {
+            method: 'POST',
+            body: JSON.stringify(opts),
+            headers: { 'Content-Type': 'application/json' },
+        });
+        (0, error_1.checkStatus)(response);
+        const data = await response.json();
+        return data;
+    }
+    /**
+     * Search the Rekor logs for entries matching the given query.
+     * @param opts {SearchLogQuery} Query to search the Rekor log
+     * @returns {Promise<Entry[]>} List of matching entries
+     */
+    async searchLog(opts) {
+        const url = `${this.baseUrl}/api/v1/log/entries/retrieve`;
+        const response = await this.fetch(url, {
+            method: 'POST',
+            body: JSON.stringify(opts),
+            headers: { 'Content-Type': 'application/json' },
+        });
+        (0, error_1.checkStatus)(response);
+        const rawData = await response.json();
+        const data = rawData.map((d) => entryFromResponse(d));
+        return data;
+    }
+}
+exports.Rekor = Rekor;
+// Unpack the response from the Rekor API into a more convenient format.
+function entryFromResponse(data) {
+    const entries = Object.entries(data);
+    if (entries.length != 1) {
+        throw new Error('Received multiple entries in Rekor response');
+    }
+    // Grab UUID and entry data from the response
+    const [uuid, entry] = Object.entries(data)[0];
+    return {
+        ...entry,
+        uuid,
+    };
+}
diff --git a/node_modules/sigstore/dist/error.d.ts b/node_modules/sigstore/dist/error.d.ts
new file mode 100644
index 0000000000000..0e2dad0c57de8
--- /dev/null
+++ b/node_modules/sigstore/dist/error.d.ts
@@ -0,0 +1,13 @@
+declare class BaseError extends Error {
+    cause: any | undefined;
+    constructor(message: string, cause?: any);
+}
+export declare class VerificationError extends BaseError {
+}
+export declare class ValidationError extends BaseError {
+}
+export declare class InternalError extends BaseError {
+}
+export declare class PolicyError extends BaseError {
+}
+export {};
diff --git a/node_modules/sigstore/dist/error.js b/node_modules/sigstore/dist/error.js
new file mode 100644
index 0000000000000..e302d3aee6ad5
--- /dev/null
+++ b/node_modules/sigstore/dist/error.js
@@ -0,0 +1,38 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.PolicyError = exports.InternalError = exports.ValidationError = exports.VerificationError = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+/* eslint-disable @typescript-eslint/no-explicit-any */
+class BaseError extends Error {
+    constructor(message, cause) {
+        super(message);
+        this.name = this.constructor.name;
+        this.cause = cause;
+    }
+}
+class VerificationError extends BaseError {
+}
+exports.VerificationError = VerificationError;
+class ValidationError extends BaseError {
+}
+exports.ValidationError = ValidationError;
+class InternalError extends BaseError {
+}
+exports.InternalError = InternalError;
+class PolicyError extends BaseError {
+}
+exports.PolicyError = PolicyError;
diff --git a/node_modules/sigstore/dist/identity/ci.d.ts b/node_modules/sigstore/dist/identity/ci.d.ts
new file mode 100644
index 0000000000000..428606f26524b
--- /dev/null
+++ b/node_modules/sigstore/dist/identity/ci.d.ts
@@ -0,0 +1,11 @@
+import { Provider } from './provider';
+/**
+ * CIContextProvider is a composite identity provider which will iterate
+ * over all of the CI-specific providers and return the token from the first
+ * one that resolves.
+ */
+export declare class CIContextProvider implements Provider {
+    private audience;
+    constructor(audience: string);
+    getToken(): Promise<string>;
+}
diff --git a/node_modules/sigstore/dist/identity/ci.js b/node_modules/sigstore/dist/identity/ci.js
new file mode 100644
index 0000000000000..fecf63f4ec991
--- /dev/null
+++ b/node_modules/sigstore/dist/identity/ci.js
@@ -0,0 +1,65 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.CIContextProvider = void 0;
+/*
+Copyright 2022 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const make_fetch_happen_1 = __importDefault(require("make-fetch-happen"));
+const util_1 = require("../util");
+// Collection of all the CI-specific providers we have implemented
+const providers = [getGHAToken];
+/**
+ * CIContextProvider is a composite identity provider which will iterate
+ * over all of the CI-specific providers and return the token from the first
+ * one that resolves.
+ */
+class CIContextProvider {
+    constructor(audience) {
+        this.audience = audience;
+    }
+    // Invoke all registered ProviderFuncs and return the value of whichever one
+    // resolves first.
+    async getToken() {
+        return util_1.promise
+            .promiseAny(providers.map((getToken) => getToken(this.audience)))
+            .catch(() => Promise.reject('CI: no tokens available'));
+    }
+}
+exports.CIContextProvider = CIContextProvider;
+/**
+ * getGHAToken can retrieve an OIDC token when running in a GitHub Actions
+ * workflow
+ */
+async function getGHAToken(audience) {
+    // Check to see if we're running in GitHub Actions
+    if (!process.env.ACTIONS_ID_TOKEN_REQUEST_URL ||
+        !process.env.ACTIONS_ID_TOKEN_REQUEST_TOKEN) {
+        return Promise.reject('no token available');
+    }
+    // Construct URL to request token w/ appropriate audience
+    const url = new URL(process.env.ACTIONS_ID_TOKEN_REQUEST_URL);
+    url.searchParams.append('audience', audience);
+    const response = await (0, make_fetch_happen_1.default)(url.href, {
+        retry: 2,
+        headers: {
+            Accept: 'application/json',
+            Authorization: `Bearer ${process.env.ACTIONS_ID_TOKEN_REQUEST_TOKEN}`,
+        },
+    });
+    return response.json().then((data) => data.value);
+}
diff --git a/node_modules/sigstore/dist/identity/index.d.ts b/node_modules/sigstore/dist/identity/index.d.ts
new file mode 100644
index 0000000000000..e3d23b4dbf8c6
--- /dev/null
+++ b/node_modules/sigstore/dist/identity/index.d.ts
@@ -0,0 +1,25 @@
+import { Provider } from './provider';
+/**
+ * oauthProvider returns a new Provider instance which attempts to retrieve
+ * an identity token from the configured OAuth2 issuer.
+ *
+ * @param issuer Base URL of the issuer
+ * @param clientID Client ID for the issuer
+ * @param clientSecret Client secret for the issuer (optional)
+ * @returns {Provider}
+ */
+declare function oauthProvider(issuer: string, clientID: string, clientSecret?: string): Provider;
+/**
+ * ciContextProvider returns a new Provider instance which attempts to retrieve
+ * an identity token from the CI context.
+ *
+ * @param audience audience claim for the generated token
+ * @returns {Provider}
+ */
+declare function ciContextProvider(audience?: string): Provider;
+declare const _default: {
+    ciContextProvider: typeof ciContextProvider;
+    oauthProvider: typeof oauthProvider;
+};
+export default _default;
+export { Provider } from './provider';
diff --git a/node_modules/sigstore/dist/identity/index.js b/node_modules/sigstore/dist/identity/index.js
new file mode 100644
index 0000000000000..bfb203aa39e8c
--- /dev/null
+++ b/node_modules/sigstore/dist/identity/index.js
@@ -0,0 +1,46 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+/*
+Copyright 2022 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const ci_1 = require("./ci");
+const issuer_1 = require("./issuer");
+const oauth_1 = require("./oauth");
+/**
+ * oauthProvider returns a new Provider instance which attempts to retrieve
+ * an identity token from the configured OAuth2 issuer.
+ *
+ * @param issuer Base URL of the issuer
+ * @param clientID Client ID for the issuer
+ * @param clientSecret Client secret for the issuer (optional)
+ * @returns {Provider}
+ */
+function oauthProvider(issuer, clientID, clientSecret) {
+    return new oauth_1.OAuthProvider(new issuer_1.Issuer(issuer), clientID, clientSecret);
+}
+/**
+ * ciContextProvider returns a new Provider instance which attempts to retrieve
+ * an identity token from the CI context.
+ *
+ * @param audience audience claim for the generated token
+ * @returns {Provider}
+ */
+function ciContextProvider(audience = 'sigstore') {
+    return new ci_1.CIContextProvider(audience);
+}
+exports.default = {
+    ciContextProvider,
+    oauthProvider,
+};
diff --git a/node_modules/sigstore/dist/identity/issuer.d.ts b/node_modules/sigstore/dist/identity/issuer.d.ts
new file mode 100644
index 0000000000000..37ad713f4d89a
--- /dev/null
+++ b/node_modules/sigstore/dist/identity/issuer.d.ts
@@ -0,0 +1,15 @@
+/**
+ * The Issuer represents a single OAuth2 provider.
+ *
+ * The Issuer is configured with a provider's base OAuth2 endpoint which is
+ * used to retrieve the associated configuration information.
+ */
+export declare class Issuer {
+    private baseURL;
+    private fetch;
+    private config?;
+    constructor(baseURL: string);
+    authEndpoint(): Promise<string>;
+    tokenEndpoint(): Promise<string>;
+    private loadOpenIDConfig;
+}
diff --git a/node_modules/sigstore/dist/identity/issuer.js b/node_modules/sigstore/dist/identity/issuer.js
new file mode 100644
index 0000000000000..2bf6c20f34932
--- /dev/null
+++ b/node_modules/sigstore/dist/identity/issuer.js
@@ -0,0 +1,53 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Issuer = void 0;
+/*
+Copyright 2022 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const make_fetch_happen_1 = __importDefault(require("make-fetch-happen"));
+// Standard endpoint for retrieving OpenID configuration information
+const OPENID_CONFIG_PATH = '/.well-known/openid-configuration';
+/**
+ * The Issuer represents a single OAuth2 provider.
+ *
+ * The Issuer is configured with a provider's base OAuth2 endpoint which is
+ * used to retrieve the associated configuration information.
+ */
+class Issuer {
+    constructor(baseURL) {
+        this.baseURL = baseURL;
+        this.fetch = make_fetch_happen_1.default.defaults({ retry: 2 });
+    }
+    async authEndpoint() {
+        if (!this.config) {
+            this.config = await this.loadOpenIDConfig();
+        }
+        return this.config.authorization_endpoint;
+    }
+    async tokenEndpoint() {
+        if (!this.config) {
+            this.config = await this.loadOpenIDConfig();
+        }
+        return this.config.token_endpoint;
+    }
+    async loadOpenIDConfig() {
+        const url = `${this.baseURL}${OPENID_CONFIG_PATH}`;
+        return this.fetch(url).then((res) => res.json());
+    }
+}
+exports.Issuer = Issuer;
diff --git a/node_modules/sigstore/dist/identity/oauth.d.ts b/node_modules/sigstore/dist/identity/oauth.d.ts
new file mode 100644
index 0000000000000..d02e212a7f88e
--- /dev/null
+++ b/node_modules/sigstore/dist/identity/oauth.d.ts
@@ -0,0 +1,19 @@
+import { Issuer } from './issuer';
+import { Provider } from './provider';
+export declare class OAuthProvider implements Provider {
+    private clientID;
+    private clientSecret;
+    private issuer;
+    private codeVerifier;
+    private state;
+    private redirectURI?;
+    constructor(issuer: Issuer, clientID: string, clientSecret?: string);
+    getToken(): Promise<string>;
+    private initiateAuthRequest;
+    private getIDToken;
+    private getBasicAuthHeaderValue;
+    private getAuthRequestURL;
+    private getAuthRequestParams;
+    private getCodeChallenge;
+    private openURL;
+}
diff --git a/node_modules/sigstore/dist/identity/oauth.js b/node_modules/sigstore/dist/identity/oauth.js
new file mode 100644
index 0000000000000..651a0c7e80f1b
--- /dev/null
+++ b/node_modules/sigstore/dist/identity/oauth.js
@@ -0,0 +1,188 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.OAuthProvider = void 0;
+/*
+Copyright 2022 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const assert_1 = __importDefault(require("assert"));
+const child_process_1 = __importDefault(require("child_process"));
+const http_1 = __importDefault(require("http"));
+const make_fetch_happen_1 = __importDefault(require("make-fetch-happen"));
+const url_1 = require("url");
+const util_1 = require("../util");
+class OAuthProvider {
+    constructor(issuer, clientID, clientSecret) {
+        this.clientID = clientID;
+        this.clientSecret = clientSecret || '';
+        this.issuer = issuer;
+        this.codeVerifier = generateRandomString(32);
+        this.state = generateRandomString(16);
+    }
+    async getToken() {
+        const authCode = await this.initiateAuthRequest();
+        return this.getIDToken(authCode);
+    }
+    // Initiates the authorization request. This will start an HTTP server to
+    // receive the post-auth redirect and then open the user's default browser to
+    // the provider's authorization page.
+    async initiateAuthRequest() {
+        const server = http_1.default.createServer();
+        const sockets = new Set();
+        // Start server and wait till it is listening
+        await new Promise((resolve) => {
+            server.listen(0, resolve);
+        });
+        // Keep track of connections to the server so we can force a shutdown
+        server.on('connection', (socket) => {
+            sockets.add(socket);
+            socket.once('close', () => {
+                sockets.delete(socket);
+            });
+        });
+        // Get port the server is listening on and construct the server URL
+        const port = server.address().port;
+        this.redirectURI = `http://localhost:${port}`;
+        const result = new Promise((resolve, reject) => {
+            // Set-up handler for post-auth redirect
+            server.on('request', (req, res) => {
+                if (!req.url) {
+                    reject('invalid server request');
+                    return;
+                }
+                res.writeHead(200);
+                res.end('Auth Successful');
+                // Parse incoming request URL
+                const query = new url_1.URL(req.url, this.redirectURI).searchParams;
+                // Check to see if the state matches
+                if (query.get('state') !== this.state) {
+                    reject('invalid state value');
+                    return;
+                }
+                const authCode = query.get('code');
+                // Force-close any open connections to the server so we can get a
+                // clean shutdown
+                for (const socket of sockets) {
+                    socket.destroy();
+                    sockets.delete(socket);
+                }
+                // Return auth code once we've shutdown server
+                server.close(() => {
+                    if (!authCode) {
+                        reject('authorization code not found');
+                    }
+                    else {
+                        resolve(authCode);
+                    }
+                });
+            });
+        });
+        try {
+            // Open browser to start authorization request
+            const authBaseURL = await this.issuer.authEndpoint();
+            const authURL = this.getAuthRequestURL(authBaseURL);
+            await this.openURL(authURL);
+        }
+        catch (err) {
+            // Prevent leaked server handler on error
+            server.close();
+            throw err;
+        }
+        return result;
+    }
+    // Uses the provided authorization code, to retrieve the ID token from the
+    // provider
+    async getIDToken(authCode) {
+        (0, assert_1.default)(this.redirectURI);
+        const tokenEndpointURL = await this.issuer.tokenEndpoint();
+        const params = new url_1.URLSearchParams();
+        params.append('grant_type', 'authorization_code');
+        params.append('code', authCode);
+        params.append('redirect_uri', this.redirectURI);
+        params.append('code_verifier', this.codeVerifier);
+        const response = await (0, make_fetch_happen_1.default)(tokenEndpointURL, {
+            method: 'POST',
+            headers: { Authorization: `Basic ${this.getBasicAuthHeaderValue()}` },
+            body: params,
+        }).then((r) => r.json());
+        return response.id_token;
+    }
+    // Construct the basic auth header value from the client ID and secret
+    getBasicAuthHeaderValue() {
+        return util_1.encoding.base64Encode(`${this.clientID}:${this.clientSecret}`);
+    }
+    // Generate starting URL for authorization request
+    getAuthRequestURL(baseURL) {
+        const params = this.getAuthRequestParams();
+        return `${baseURL}?${params.toString()}`;
+    }
+    // Collect parameters for authorization request
+    getAuthRequestParams() {
+        (0, assert_1.default)(this.redirectURI);
+        const codeChallenge = this.getCodeChallenge();
+        return new url_1.URLSearchParams({
+            response_type: 'code',
+            client_id: this.clientID,
+            client_secret: this.clientSecret,
+            scope: 'openid email',
+            redirect_uri: this.redirectURI,
+            code_challenge: codeChallenge,
+            code_challenge_method: 'S256',
+            state: this.state,
+            nonce: generateRandomString(16),
+        });
+    }
+    // Generate code challenge for authorization request
+    getCodeChallenge() {
+        return util_1.encoding.base64URLEscape(util_1.crypto.hash(this.codeVerifier).toString('base64'));
+    }
+    // Open the supplied URL in the user's default browser
+    async openURL(url) {
+        return new Promise((resolve, reject) => {
+            let open = null;
+            let command = `"${url}"`;
+            switch (process.platform) {
+                case 'darwin':
+                    open = 'open';
+                    break;
+                case 'linux' || 'freebsd' || 'netbsd' || 'openbsd':
+                    open = 'xdg-open';
+                    break;
+                case 'win32':
+                    open = 'start';
+                    command = `"" ${command}`;
+                    break;
+                default:
+                    return reject(`OAuth: unsupported platform: ${process.platform}`);
+            }
+            console.error(`Your browser will now be opened to: ${url}`);
+            child_process_1.default.exec(`${open} ${command}`, undefined, (err) => {
+                if (err) {
+                    reject(err);
+                }
+                else {
+                    resolve();
+                }
+            });
+        });
+    }
+}
+exports.OAuthProvider = OAuthProvider;
+// Generate random code verifier value
+function generateRandomString(len) {
+    return util_1.encoding.base64URLEscape(util_1.crypto.randomBytes(len).toString('base64'));
+}
diff --git a/node_modules/sigstore/dist/identity/provider.d.ts b/node_modules/sigstore/dist/identity/provider.d.ts
new file mode 100644
index 0000000000000..95ec03e9ffff6
--- /dev/null
+++ b/node_modules/sigstore/dist/identity/provider.d.ts
@@ -0,0 +1,3 @@
+export interface Provider {
+    getToken: () => Promise<string>;
+}
diff --git a/node_modules/sigstore/dist/identity/provider.js b/node_modules/sigstore/dist/identity/provider.js
new file mode 100644
index 0000000000000..c8ad2e549bdc6
--- /dev/null
+++ b/node_modules/sigstore/dist/identity/provider.js
@@ -0,0 +1,2 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/node_modules/sigstore/dist/index.d.ts b/node_modules/sigstore/dist/index.d.ts
new file mode 100644
index 0000000000000..fb23e5e622655
--- /dev/null
+++ b/node_modules/sigstore/dist/index.d.ts
@@ -0,0 +1 @@
+export * as sigstore from './sigstore';
diff --git a/node_modules/sigstore/dist/index.js b/node_modules/sigstore/dist/index.js
new file mode 100644
index 0000000000000..502155e4d5f3f
--- /dev/null
+++ b/node_modules/sigstore/dist/index.js
@@ -0,0 +1,42 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.sigstore = void 0;
+/*
+Copyright 2022 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+exports.sigstore = __importStar(require("./sigstore"));
diff --git a/node_modules/sigstore/dist/merkle/digest.d.ts b/node_modules/sigstore/dist/merkle/digest.d.ts
new file mode 100644
index 0000000000000..5c692214f91a0
--- /dev/null
+++ b/node_modules/sigstore/dist/merkle/digest.d.ts
@@ -0,0 +1,8 @@
+/// <reference types="node" />
+export declare class Hasher {
+    private algorithm;
+    constructor(algorithm?: string);
+    size(): number;
+    hashLeaf(leaf: Buffer): Buffer;
+    hashChildren(l: Buffer, r: Buffer): Buffer;
+}
diff --git a/node_modules/sigstore/dist/merkle/digest.js b/node_modules/sigstore/dist/merkle/digest.js
new file mode 100644
index 0000000000000..5b7ff04a506ae
--- /dev/null
+++ b/node_modules/sigstore/dist/merkle/digest.js
@@ -0,0 +1,48 @@
+"use strict";
+/*
+Copyright 2022 GitHub, Inc
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Hasher = void 0;
+const crypto_1 = __importDefault(require("crypto"));
+const RFC6962LeafHashPrefix = Buffer.from([0x00]);
+const RFC6962NodeHashPrefix = Buffer.from([0x01]);
// Implements Merkle Tree Hash logic according to RFC6962.
// https://datatracker.ietf.org/doc/html/rfc6962#section-2
class Hasher {
    constructor(algorithm = 'sha256') {
        this.algorithm = algorithm;
    }
    // Digest length (in bytes) produced by the configured algorithm.
    size() {
        return crypto_1.default.createHash(this.algorithm).digest().length;
    }
    // Leaf hash per RFC6962: H(0x00 || leaf).
    hashLeaf(leaf) {
        return crypto_1.default
            .createHash(this.algorithm)
            .update(RFC6962LeafHashPrefix)
            .update(leaf)
            .digest();
    }
    // Interior-node hash per RFC6962: H(0x01 || left || right).
    hashChildren(l, r) {
        return crypto_1.default
            .createHash(this.algorithm)
            .update(RFC6962NodeHashPrefix)
            .update(l)
            .update(r)
            .digest();
    }
}
exports.Hasher = Hasher;
diff --git a/node_modules/sigstore/dist/merkle/index.d.ts b/node_modules/sigstore/dist/merkle/index.d.ts
new file mode 100644
index 0000000000000..d8ffe7c03fb7d
--- /dev/null
+++ b/node_modules/sigstore/dist/merkle/index.d.ts
@@ -0,0 +1,2 @@
+export { Hasher } from './digest';
+export { verifyInclusion } from './verify';
diff --git a/node_modules/sigstore/dist/merkle/index.js b/node_modules/sigstore/dist/merkle/index.js
new file mode 100644
index 0000000000000..2dd39f10b646d
--- /dev/null
+++ b/node_modules/sigstore/dist/merkle/index.js
@@ -0,0 +1,22 @@
+"use strict";
+/*
+Copyright 2022 GitHub, Inc
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.verifyInclusion = exports.Hasher = void 0;
+var digest_1 = require("./digest");
+Object.defineProperty(exports, "Hasher", { enumerable: true, get: function () { return digest_1.Hasher; } });
+var verify_1 = require("./verify");
+Object.defineProperty(exports, "verifyInclusion", { enumerable: true, get: function () { return verify_1.verifyInclusion; } });
diff --git a/node_modules/sigstore/dist/merkle/verify.d.ts b/node_modules/sigstore/dist/merkle/verify.d.ts
new file mode 100644
index 0000000000000..b1b28b7bfc10c
--- /dev/null
+++ b/node_modules/sigstore/dist/merkle/verify.d.ts
@@ -0,0 +1,3 @@
+/// <reference types="node" />
+import { Hasher } from './digest';
+export declare function verifyInclusion(hasher: Hasher, index: bigint, size: bigint, leafHash: Buffer, proof: Buffer[], root: Buffer): boolean;
diff --git a/node_modules/sigstore/dist/merkle/verify.js b/node_modules/sigstore/dist/merkle/verify.js
new file mode 100644
index 0000000000000..345543425aa08
--- /dev/null
+++ b/node_modules/sigstore/dist/merkle/verify.js
@@ -0,0 +1,78 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.verifyInclusion = void 0;
+/*
+Copyright 2022 GitHub, Inc
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+// Implementation largely copied from
+// https://github.com/transparency-dev/merkle/blob/main/proof/verify.go#L46
// Verifies the correctness of the inclusion proof for the given leaf hash
// and index relative to the tree of the given size and root hash.
function verifyInclusion(hasher, index, size, leafHash, proof, root) {
    const expectedRoot = rootFromInclusionProof(hasher, index, size, leafHash, proof);
    return expectedRoot.equals(root);
}
exports.verifyInclusion = verifyInclusion;
// Calculates the expected root hash for a tree of the given size, provided a
// leaf index and hash with corresponding inclusion proof.
// Throws if the index is out of range, the leaf hash has the wrong length,
// or the proof does not have the expected number of elements.
function rootFromInclusionProof(hasher, index, size, leafHash, proof) {
    if (index >= size) {
        throw new Error('index exceeds size of tree');
    }
    if (leafHash.length !== hasher.size()) {
        throw new Error('leafHash has unexpected size');
    }
    const { inner, border } = decompInclProof(index, size);
    // Strict equality: both operands are plain numbers (was loose `!=`).
    if (proof.length !== inner + border) {
        throw new Error('invalid proof length');
    }
    // Fold in the inner (left/right depends on index bits) portion, then the
    // right-border portion of the proof.
    let hash = chainInner(hasher, leafHash, proof.slice(0, inner), index);
    hash = chainBorderRight(hasher, hash, proof.slice(inner));
    return hash;
}
// Breaks down inclusion proof for a leaf at the specified index in a tree of
// the specified size. The split point is where paths to the index leaf and
// the (size - 1) leaf diverge. Returns lengths of the bottom and upper proof
// parts.
function decompInclProof(index, size) {
    const inner = innerProofSize(index, size);
    return { inner, border: onesCount(index >> BigInt(inner)) };
}
// Computes a subtree hash for a node on or below the tree's right border.
// Assumes the provided proof hashes are ordered from lower to higher levels
// and seed is the initial hash of the node specified by the index.
function chainInner(hasher, seed, proof, index) {
    let node = seed;
    for (let i = 0; i < proof.length; i++) {
        const sibling = proof[i];
        // Bit i of the index tells us whether the sibling sits on the left.
        if ((index >> BigInt(i)) & BigInt(1)) {
            node = hasher.hashChildren(sibling, node);
        }
        else {
            node = hasher.hashChildren(node, sibling);
        }
    }
    return node;
}
// Computes a subtree hash for nodes along the tree's right border.
// Each proof hash is the left sibling of the accumulated node.
function chainBorderRight(hasher, seed, proof) {
    let node = seed;
    for (const sibling of proof) {
        node = hasher.hashChildren(sibling, node);
    }
    return node;
}
// Size of the "inner" portion of the proof: the bit length of
// index XOR (size - 1), i.e. where the two leaf paths diverge.
function innerProofSize(index, size) {
    const divergence = index ^ (size - BigInt(1));
    return divergence.toString(2).length;
}
// Counts the number of ones in the binary representation of the given number.
// https://en.wikipedia.org/wiki/Hamming_weight
function onesCount(x) {
    let count = 0;
    for (const bit of x.toString(2)) {
        if (bit === '1') {
            count += 1;
        }
    }
    return count;
}
diff --git a/node_modules/sigstore/dist/sign.d.ts b/node_modules/sigstore/dist/sign.d.ts
new file mode 100644
index 0000000000000..7d8b4f0de464e
--- /dev/null
+++ b/node_modules/sigstore/dist/sign.d.ts
@@ -0,0 +1,23 @@
+/// <reference types="node" />
+import { CA } from './ca';
+import { Provider } from './identity';
+import { TLog } from './tlog';
+import { SignerFunc } from './types/signature';
+import { Bundle } from './types/sigstore';
+export interface SignOptions {
+    ca: CA;
+    tlog: TLog;
+    identityProviders: Provider[];
+    signer?: SignerFunc;
+}
+export declare class Signer {
+    private ca;
+    private tlog;
+    private signer;
+    private identityProviders;
+    constructor(options: SignOptions);
+    signBlob(payload: Buffer): Promise<Bundle>;
+    signAttestation(payload: Buffer, payloadType: string): Promise<Bundle>;
+    private signWithEphemeralKey;
+    private getIdentityToken;
+}
diff --git a/node_modules/sigstore/dist/sign.js b/node_modules/sigstore/dist/sign.js
new file mode 100644
index 0000000000000..97c3da04b065b
--- /dev/null
+++ b/node_modules/sigstore/dist/sign.js
@@ -0,0 +1,73 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Signer = void 0;
+const util_1 = require("./util");
// Orchestrates the Sigstore signing flow: obtains an identity token from one
// of the configured providers, signs the payload (by default with a
// freshly-generated ephemeral key certified by the CA), and records the
// result in the transparency log via the supplied TLog client.
class Signer {
    constructor(options) {
        this.identityProviders = [];
        this.ca = options.ca;
        this.tlog = options.tlog;
        this.identityProviders = options.identityProviders;
        // Fall back to ephemeral-key signing when no custom signer is given.
        this.signer = options.signer || this.signWithEphemeralKey.bind(this);
    }
    // Signs a raw blob and returns the bundle produced by the TLog client for
    // the resulting message-signature entry.
    async signBlob(payload) {
        // Get signature and verification material for payload
        const sigMaterial = await this.signer(payload);
        // Calculate artifact digest
        const digest = util_1.crypto.hash(payload);
        // Create Rekor entry
        return this.tlog.createMessageSignatureEntry(digest, sigMaterial);
    }
    // Wraps the payload in a DSSE envelope, signs the pre-authentication
    // encoding, and returns the bundle for the resulting DSSE entry.
    async signAttestation(payload, payloadType) {
        // Pre-authentication encoding to be signed
        const paeBuffer = util_1.dsse.preAuthEncoding(payloadType, payload);
        // Get signature and verification material for pae
        const sigMaterial = await this.signer(paeBuffer);
        const envelope = {
            payloadType,
            payload: payload,
            signatures: [
                {
                    // Empty keyid when the signing material carries no key id.
                    keyid: sigMaterial.key?.id || '',
                    sig: sigMaterial.signature,
                },
            ],
        };
        return this.tlog.createDSSEEntry(envelope, sigMaterial);
    }
    // Default signer: one-time key pair + CA-issued signing certificate.
    async signWithEphemeralKey(payload) {
        // Create ephemeral key pair
        const keypair = util_1.crypto.generateKeyPair();
        // Retrieve identity token from one of the supplied identity providers
        const identityToken = await this.getIdentityToken();
        // Extract challenge claim from OIDC token
        const subject = util_1.oidc.extractJWTSubject(identityToken);
        // Construct challenge value by signing the subject with the private
        // key (original comment said "encrypting"; the code calls signBlob).
        const challenge = util_1.crypto.signBlob(Buffer.from(subject), keypair.privateKey);
        // Create signing certificate
        const certificates = await this.ca.createSigningCertificate(identityToken, keypair.publicKey, challenge);
        // Generate artifact signature
        const signature = util_1.crypto.signBlob(payload, keypair.privateKey);
        // key is undefined: verification material is the certificate chain.
        return {
            signature,
            certificates,
            key: undefined,
        };
    }
    // Queries each identity provider in order and returns the first non-empty
    // token; throws with the collected errors if every provider fails.
    async getIdentityToken() {
        const aggErrs = [];
        for (const provider of this.identityProviders) {
            try {
                const token = await provider.getToken();
                if (token) {
                    return token;
                }
            }
            catch (err) {
                aggErrs.push(err);
            }
        }
        throw new Error(`Identity token providers failed: ${aggErrs}`);
    }
}
exports.Signer = Signer;
diff --git a/node_modules/sigstore/dist/sigstore-utils.d.ts b/node_modules/sigstore/dist/sigstore-utils.d.ts
new file mode 100644
index 0000000000000..e3e3fd4c4e273
--- /dev/null
+++ b/node_modules/sigstore/dist/sigstore-utils.d.ts
@@ -0,0 +1,7 @@
+/// <reference types="node" />
+import { Bundle, Envelope, SignOptions } from './sigstore';
+import { SignerFunc } from './types/signature';
+export declare function createDSSEEnvelope(payload: Buffer, payloadType: string, options: {
+    signer: SignerFunc;
+}): Promise<Envelope>;
+export declare function createRekorEntry(dsseEnvelope: Envelope, publicKey: string, options?: SignOptions): Promise<Bundle>;
diff --git a/node_modules/sigstore/dist/sigstore-utils.js b/node_modules/sigstore/dist/sigstore-utils.js
new file mode 100644
index 0000000000000..f11cf8c13cc45
--- /dev/null
+++ b/node_modules/sigstore/dist/sigstore-utils.js
@@ -0,0 +1,58 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.createRekorEntry = exports.createDSSEEnvelope = void 0;
+/*
+Copyright 2022 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const sigstore_1 = require("./sigstore");
+const tlog_1 = require("./tlog");
+const signature_1 = require("./types/signature");
+const sigstore_2 = require("./types/sigstore");
+const util_1 = require("./util");
// Builds a Rekor client for the given options, falling back to the public
// default instance when no URL is supplied.
function createTLogClient(options) {
    const rekorBaseURL = options.rekorURL || sigstore_1.DEFAULT_REKOR_URL;
    return new tlog_1.TLogClient({ rekorBaseURL });
}
// Signs the payload's DSSE pre-authentication encoding with the supplied
// signer and returns the serialized envelope.
async function createDSSEEnvelope(payload, payloadType, options) {
    // Pre-authentication encoding to be signed
    const pae = util_1.dsse.preAuthEncoding(payloadType, payload);
    // Get signature and verification material for the PAE
    const sigMaterial = await options.signer(pae);
    const signature = {
        // Empty keyid when the signing material carries no key id.
        keyid: sigMaterial.key?.id || '',
        sig: sigMaterial.signature,
    };
    return (0, sigstore_2.envelopeToJSON)({
        payloadType,
        payload,
        signatures: [signature],
    });
}
exports.createDSSEEnvelope = createDSSEEnvelope;
// Accepts a signed DSSE envelope and a PEM-encoded public key to be added to the
// transparency log. Returns a Sigstore bundle suitable for offline verification.
async function createRekorEntry(dsseEnvelope, publicKey, options = {}) {
    const envelope = (0, sigstore_2.envelopeFromJSON)(dsseEnvelope);
    const tlog = createTLogClient(options);
    const sigMaterial = (0, signature_1.extractSignatureMaterial)(envelope, publicKey);
    const entry = await tlog.createDSSEEntry(envelope, sigMaterial, {
        fetchOnConflict: true,
    });
    return (0, sigstore_2.bundleToJSON)(entry);
}
exports.createRekorEntry = createRekorEntry;
diff --git a/node_modules/sigstore/dist/sigstore.d.ts b/node_modules/sigstore/dist/sigstore.d.ts
new file mode 100644
index 0000000000000..bb3034383e695
--- /dev/null
+++ b/node_modules/sigstore/dist/sigstore.d.ts
@@ -0,0 +1,30 @@
+/// <reference types="node" />
+import * as sigstore from './types/sigstore';
+import { KeySelector } from './verify';
+export * as utils from './sigstore-utils';
+export { SerializedBundle as Bundle, SerializedEnvelope as Envelope, } from './types/sigstore';
+export declare const DEFAULT_FULCIO_URL = "https://fulcio.sigstore.dev";
+export declare const DEFAULT_REKOR_URL = "https://rekor.sigstore.dev";
+interface TLogOptions {
+    rekorURL?: string;
+}
+export type SignOptions = {
+    fulcioURL?: string;
+    identityToken?: string;
+    oidcIssuer?: string;
+    oidcClientID?: string;
+    oidcClientSecret?: string;
+} & TLogOptions;
+export type VerifyOptions = {
+    ctLogThreshold?: number;
+    tlogThreshold?: number;
+    certificateIssuer?: string;
+    certificateIdentityEmail?: string;
+    certificateIdentityURI?: string;
+    certificateOIDs?: Record<string, string>;
+    keySelector?: KeySelector;
+} & TLogOptions;
+type Bundle = sigstore.SerializedBundle;
+export declare function sign(payload: Buffer, options?: SignOptions): Promise<Bundle>;
+export declare function attest(payload: Buffer, payloadType: string, options?: SignOptions): Promise<Bundle>;
+export declare function verify(bundle: Bundle, payload?: Buffer, options?: VerifyOptions): Promise<void>;
diff --git a/node_modules/sigstore/dist/sigstore.js b/node_modules/sigstore/dist/sigstore.js
new file mode 100644
index 0000000000000..ef8fb2058a47e
--- /dev/null
+++ b/node_modules/sigstore/dist/sigstore.js
@@ -0,0 +1,188 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.verify = exports.attest = exports.sign = exports.DEFAULT_REKOR_URL = exports.DEFAULT_FULCIO_URL = exports.utils = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const fs_1 = __importDefault(require("fs"));
+const os_1 = __importDefault(require("os"));
+const path_1 = __importDefault(require("path"));
+const ca_1 = require("./ca");
+const identity_1 = __importDefault(require("./identity"));
+const sign_1 = require("./sign");
+const tlog_1 = require("./tlog");
+const tuf = __importStar(require("./tuf"));
+const sigstore = __importStar(require("./types/sigstore"));
+const verify_1 = require("./verify");
+exports.utils = __importStar(require("./sigstore-utils"));
+exports.DEFAULT_FULCIO_URL = 'https://fulcio.sigstore.dev';
+exports.DEFAULT_REKOR_URL = 'https://rekor.sigstore.dev';
// Builds a Fulcio CA client, falling back to the public default instance
// when no URL is supplied.
function createCAClient(options) {
    const fulcioBaseURL = options.fulcioURL || exports.DEFAULT_FULCIO_URL;
    return new ca_1.CAClient({ fulcioBaseURL });
}
// Builds a Rekor client, falling back to the public default instance when no
// URL is supplied.
function createTLogClient(options) {
    const rekorBaseURL = options.rekorURL || exports.DEFAULT_REKOR_URL;
    return new tlog_1.TLogClient({ rekorBaseURL });
}
// Signs the given payload and returns the serialized Sigstore bundle.
async function sign(payload, options = {}) {
    const signer = new sign_1.Signer({
        ca: createCAClient(options),
        tlog: createTLogClient(options),
        identityProviders: configureIdentityProviders(options),
    });
    const bundle = await signer.signBlob(payload);
    return sigstore.Bundle.toJSON(bundle);
}
exports.sign = sign;
// Wraps the payload in a DSSE envelope, signs it, and returns the serialized
// Sigstore bundle.
async function attest(payload, payloadType, options = {}) {
    const signer = new sign_1.Signer({
        ca: createCAClient(options),
        tlog: createTLogClient(options),
        identityProviders: configureIdentityProviders(options),
    });
    const bundle = await signer.signAttestation(payload, payloadType);
    return sigstore.Bundle.toJSON(bundle);
}
exports.attest = attest;
// Verifies the given bundle (and optional detached payload) against the
// TUF-managed trusted root material.
async function verify(bundle, payload, options = {}) {
    const trustedRoot = await tuf.getTrustedRoot(defaultCacheDir());
    const verifier = new verify_1.Verifier(trustedRoot, options.keySelector);
    const deserializedBundle = sigstore.bundleFromJSON(bundle);
    const opts = collectArtifactVerificationOptions(options);
    return verifier.verify(deserializedBundle, opts, payload);
}
exports.verify = verify;
// Translates the IdentityProviderOptions into a list of Providers which
// should be queried to retrieve an identity token.
function configureIdentityProviders(options) {
    const { identityToken } = options;
    // If an explicit identity token is provided, use that. Setup a dummy
    // provider that just returns the token. Otherwise, setup the CI context
    // provider and (optionally) the OAuth provider.
    if (identityToken) {
        return [{ getToken: () => Promise.resolve(identityToken) }];
    }
    const idps = [identity_1.default.ciContextProvider()];
    if (options.oidcIssuer && options.oidcClientID) {
        idps.push(identity_1.default.oauthProvider(options.oidcIssuer, options.oidcClientID, options.oidcClientSecret));
    }
    return idps;
}
// Returns the directory used to cache TUF metadata: under the home directory
// when it is readable/writable, otherwise under the system temp directory.
function defaultCacheDir() {
    const home = os_1.default.homedir();
    let cacheRootDir;
    try {
        fs_1.default.accessSync(home, fs_1.default.constants.W_OK | fs_1.default.constants.R_OK);
        cacheRootDir = home;
    }
    catch (e) {
        cacheRootDir = os_1.default.tmpdir();
    }
    return path_1.default.join(cacheRootDir, '.sigstore', 'js-root');
}
// Assembles the ArtifactVerificationOptions from the supplied VerifyOptions.
function collectArtifactVerificationOptions(options) {
    // The trusted signers are only used if the options contain a certificate
    // issuer
    let signers;
    if (options.certificateIssuer) {
        let san = undefined;
        if (options.certificateIdentityEmail) {
            san = {
                type: sigstore.SubjectAlternativeNameType.EMAIL,
                identity: {
                    $case: 'value',
                    value: options.certificateIdentityEmail,
                },
            };
        }
        else if (options.certificateIdentityURI) {
            // Email takes precedence over URI when both are supplied.
            san = {
                type: sigstore.SubjectAlternativeNameType.URI,
                identity: {
                    $case: 'value',
                    value: options.certificateIdentityURI,
                },
            };
        }
        // OID keys are dotted strings (e.g. "1.3.6.1"); split them into the
        // numeric-id-array form used by the protobuf type.
        const oids = Object.entries(options.certificateOIDs || {}).map(([oid, value]) => ({
            oid: { id: oid.split('.').map((s) => parseInt(s, 10)) },
            value: Buffer.from(value),
        }));
        signers = {
            $case: 'certificateIdentities',
            certificateIdentities: {
                identities: [
                    {
                        issuer: options.certificateIssuer,
                        san: san,
                        oids: oids,
                    },
                ],
            },
        };
    }
    // Construct the artifact verification options w/ defaults
    // NOTE(review): `|| 1` treats an explicit threshold of 0 as unset and
    // substitutes 1 — confirm this is intended before relying on 0.
    return {
        ctlogOptions: {
            disable: false,
            threshold: options.ctLogThreshold || 1,
            detachedSct: false,
        },
        tlogOptions: {
            disable: false,
            threshold: options.tlogThreshold || 1,
            performOnlineVerification: false,
        },
        signers,
    };
}
diff --git a/node_modules/sigstore/dist/tlog/format.d.ts b/node_modules/sigstore/dist/tlog/format.d.ts
new file mode 100644
index 0000000000000..92251b3ac9ff9
--- /dev/null
+++ b/node_modules/sigstore/dist/tlog/format.d.ts
@@ -0,0 +1,6 @@
+/// <reference types="node" />
+import { SignatureMaterial } from '../types/signature';
+import { Envelope } from '../types/sigstore';
+import { HashedRekordKind, IntotoKind } from './types';
+export declare function toProposedHashedRekordEntry(digest: Buffer, signature: SignatureMaterial): HashedRekordKind;
+export declare function toProposedIntotoEntry(envelope: Envelope, signature: SignatureMaterial, apiVersion?: string): IntotoKind;
diff --git a/node_modules/sigstore/dist/tlog/format.js b/node_modules/sigstore/dist/tlog/format.js
new file mode 100644
index 0000000000000..67077090455a1
--- /dev/null
+++ b/node_modules/sigstore/dist/tlog/format.js
@@ -0,0 +1,105 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.toProposedIntotoEntry = exports.toProposedHashedRekordEntry = void 0;
+const util_1 = require("../util");
+const types_1 = require("./types");
+const DEFAULT_HASHEDREKORD_API_VERSION = '0.0.1';
+const DEFAULT_INTOTO_API_VERSION = '0.0.2';
// Returns a properly formatted Rekor "hashedrekord" entry for the given digest
// and signature
function toProposedHashedRekordEntry(digest, signature) {
    return {
        apiVersion: DEFAULT_HASHEDREKORD_API_VERSION,
        kind: types_1.HASHEDREKORD_KIND,
        spec: {
            data: {
                hash: {
                    algorithm: 'sha256',
                    value: digest.toString('hex'),
                },
            },
            signature: {
                content: signature.signature.toString('base64'),
                publicKey: {
                    content: util_1.encoding.base64Encode(toPublicKey(signature)),
                },
            },
        },
    };
}
exports.toProposedHashedRekordEntry = toProposedHashedRekordEntry;
// Returns a properly formatted Rekor "intoto" entry for the given DSSE
// envelope and signature
function toProposedIntotoEntry(envelope, signature, apiVersion = DEFAULT_INTOTO_API_VERSION) {
    if (apiVersion === '0.0.2') {
        return toProposedIntotoV002Entry(envelope, signature);
    }
    throw new Error(`Unsupported intoto kind API version: ${apiVersion}`);
}
exports.toProposedIntotoEntry = toProposedIntotoEntry;
// Builds the v0.0.2 "intoto" proposed entry from the DSSE envelope and
// signature material.
function toProposedIntotoV002Entry(envelope, signature) {
    // Calculate the value for the payloadHash field in the Rekor entry
    const payloadHash = util_1.crypto.hash(envelope.payload).toString('hex');
    // Calculate the value for the hash field in the Rekor entry
    const envelopeHash = calculateDSSEHash(envelope);
    // Collect values for re-creating the DSSE envelope.
    // Double-encode payload and signature because that's what Rekor expects
    const payload = util_1.encoding.base64Encode(envelope.payload.toString('base64'));
    const sig = util_1.encoding.base64Encode(envelope.signatures[0].sig.toString('base64'));
    const keyid = envelope.signatures[0].keyid;
    const publicKey = util_1.encoding.base64Encode(toPublicKey(signature));
    // Create the envelope portion of the entry. Note the inclusion of the
    // publicKey in the signature struct is not a standard part of a DSSE
    // envelope, but is required by Rekor.
    const dsse = {
        payloadType: envelope.payloadType,
        payload: payload,
        signatures: [{ sig, publicKey }],
    };
    // If the keyid is an empty string, Rekor seems to remove it altogether. We
    // need to do the same here so that we can properly recreate the entry for
    // verification.
    if (keyid.length > 0) {
        dsse.signatures[0].keyid = keyid;
    }
    return {
        apiVersion: '0.0.2',
        kind: types_1.INTOTO_KIND,
        spec: {
            content: {
                envelope: dsse,
                hash: { algorithm: 'sha256', value: envelopeHash },
                payloadHash: { algorithm: 'sha256', value: payloadHash },
            },
        },
    };
}
// Calculates the hash of a DSSE envelope for inclusion in a Rekor entry.
// There is no standard way to do this, so the scheme we're using is as
// follows:
//  * payload is base64 encoded
//  * signature is base64 encoded (only the first signature is used)
//  * keyid is included ONLY if it is NOT an empty string
//  * The resulting JSON is canonicalized and hashed to a hex string
function calculateDSSEHash(envelope) {
    const dsse = {
        payloadType: envelope.payloadType,
        payload: envelope.payload.toString('base64'),
        signatures: [{ sig: envelope.signatures[0].sig.toString('base64') }],
    };
    // If the keyid is an empty string, Rekor seems to remove it altogether.
    if (envelope.signatures[0].keyid.length > 0) {
        dsse.signatures[0].keyid = envelope.signatures[0].keyid;
    }
    return util_1.crypto.hash(util_1.json.canonicalize(dsse)).toString('hex');
}
// Returns the verification material for the signature: the leaf certificate
// when a certificate chain is present, otherwise the raw key value.
function toPublicKey(signature) {
    if (signature.certificates) {
        return signature.certificates[0];
    }
    return signature.key.value;
}
diff --git a/node_modules/sigstore/dist/tlog/index.d.ts b/node_modules/sigstore/dist/tlog/index.d.ts
new file mode 100644
index 0000000000000..9d9cc77c70d65
--- /dev/null
+++ b/node_modules/sigstore/dist/tlog/index.d.ts
@@ -0,0 +1,21 @@
+/// <reference types="node" />
+import { SignatureMaterial } from '../types/signature';
+import { Bundle, Envelope } from '../types/sigstore';
+interface CreateEntryOptions {
+    fetchOnConflict?: boolean;
+}
+export { Entry, EntryKind, HashedRekordKind } from './types';
+export interface TLog {
+    createMessageSignatureEntry: (digest: Buffer, sigMaterial: SignatureMaterial) => Promise<Bundle>;
+    createDSSEEntry: (envelope: Envelope, sigMaterial: SignatureMaterial, options?: CreateEntryOptions) => Promise<Bundle>;
+}
+export interface TLogClientOptions {
+    rekorBaseURL: string;
+}
+export declare class TLogClient implements TLog {
+    private rekor;
+    constructor(options: TLogClientOptions);
+    createMessageSignatureEntry(digest: Buffer, sigMaterial: SignatureMaterial, options?: CreateEntryOptions): Promise<Bundle>;
+    createDSSEEntry(envelope: Envelope, sigMaterial: SignatureMaterial, options?: CreateEntryOptions): Promise<Bundle>;
+    private createEntry;
+}
diff --git a/node_modules/sigstore/dist/tlog/index.js b/node_modules/sigstore/dist/tlog/index.js
new file mode 100644
index 0000000000000..c2734808fd1d9
--- /dev/null
+++ b/node_modules/sigstore/dist/tlog/index.js
@@ -0,0 +1,67 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TLogClient = void 0;
+/*
+Copyright 2022 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const client_1 = require("../client");
+const error_1 = require("../client/error");
+const error_2 = require("../error");
+const sigstore_1 = require("../types/sigstore");
+const format_1 = require("./format");
+class TLogClient {
+    constructor(options) {
+        this.rekor = new client_1.Rekor({ baseURL: options.rekorBaseURL });
+    }
+    async createMessageSignatureEntry(digest, sigMaterial, options = {}) {
+        const proposedEntry = (0, format_1.toProposedHashedRekordEntry)(digest, sigMaterial);
+        const entry = await this.createEntry(proposedEntry, options.fetchOnConflict);
+        return sigstore_1.bundle.toMessageSignatureBundle(digest, sigMaterial, entry);
+    }
+    async createDSSEEntry(envelope, sigMaterial, options = {}) {
+        const proposedEntry = (0, format_1.toProposedIntotoEntry)(envelope, sigMaterial);
+        const entry = await this.createEntry(proposedEntry, options.fetchOnConflict);
+        return sigstore_1.bundle.toDSSEBundle(envelope, sigMaterial, entry);
+    }
+    async createEntry(proposedEntry, fetchOnConflict = false) {
+        let entry;
+        try {
+            entry = await this.rekor.createEntry(proposedEntry);
+        }
+        catch (err) {
+            // If the entry already exists, fetch it (if enabled)
+            if (entryExistsError(err) && fetchOnConflict) {
+                // Grab the UUID of the existing entry from the location header
+                const uuid = err.location.split('/').pop() || '';
+                try {
+                    entry = await this.rekor.getEntry(uuid);
+                }
+                catch (err) {
+                    throw new error_2.InternalError('error fetching tlog entry', err);
+                }
+            }
+            else {
+                throw new error_2.InternalError('error creating tlog entry', err);
+            }
+        }
+        return entry;
+    }
+}
+exports.TLogClient = TLogClient;
+function entryExistsError(value) {
+    return (value instanceof error_1.HTTPError &&
+        value.statusCode === 409 &&
+        value.location !== undefined);
+}
diff --git a/node_modules/sigstore/dist/tlog/types/__generated__/hashedrekord.d.ts b/node_modules/sigstore/dist/tlog/types/__generated__/hashedrekord.d.ts
new file mode 100644
index 0000000000000..bfe4e83aecdb1
--- /dev/null
+++ b/node_modules/sigstore/dist/tlog/types/__generated__/hashedrekord.d.ts
@@ -0,0 +1,50 @@
+/**
+ * This file was automatically generated by json-schema-to-typescript.
+ * DO NOT MODIFY IT BY HAND. Instead, modify the source JSONSchema file,
+ * and run json-schema-to-typescript to regenerate this file.
+ */
+/**
+ * Schema for Rekord objects
+ */
+export type RekorSchema = HashedRekorV001Schema;
+/**
+ * Schema for Hashed Rekord object
+ */
+export interface HashedRekorV001Schema {
+    /**
+     * Information about the detached signature associated with the entry
+     */
+    signature: {
+        /**
+         * Specifies the content of the signature inline within the document
+         */
+        content?: string;
+        /**
+         * The public key that can verify the signature; this can also be an X509 code signing certificate that contains the raw public key information
+         */
+        publicKey?: {
+            /**
+             * Specifies the content of the public key or code signing certificate inline within the document
+             */
+            content?: string;
+        };
+    };
+    /**
+     * Information about the content associated with the entry
+     */
+    data: {
+        /**
+         * Specifies the hash algorithm and value for the content
+         */
+        hash?: {
+            /**
+             * The hashing function used to compute the hash value
+             */
+            algorithm: "sha256";
+            /**
+             * The hash value for the content
+             */
+            value: string;
+        };
+    };
+}
diff --git a/node_modules/sigstore/dist/tlog/types/__generated__/hashedrekord.js b/node_modules/sigstore/dist/tlog/types/__generated__/hashedrekord.js
new file mode 100644
index 0000000000000..5383a370094cd
--- /dev/null
+++ b/node_modules/sigstore/dist/tlog/types/__generated__/hashedrekord.js
@@ -0,0 +1,8 @@
+"use strict";
+/* tslint:disable */
+/**
+ * This file was automatically generated by json-schema-to-typescript.
+ * DO NOT MODIFY IT BY HAND. Instead, modify the source JSONSchema file,
+ * and run json-schema-to-typescript to regenerate this file.
+ */
+Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/node_modules/sigstore/dist/tlog/types/__generated__/intoto.d.ts b/node_modules/sigstore/dist/tlog/types/__generated__/intoto.d.ts
new file mode 100644
index 0000000000000..c60c7e0a637ca
--- /dev/null
+++ b/node_modules/sigstore/dist/tlog/types/__generated__/intoto.d.ts
@@ -0,0 +1,131 @@
+/**
+ * This file was automatically generated by json-schema-to-typescript.
+ * DO NOT MODIFY IT BY HAND. Instead, modify the source JSONSchema file,
+ * and run json-schema-to-typescript to regenerate this file.
+ */
+/**
+ * Intoto for Rekord objects
+ */
+export type IntotoSchema = IntotoV001Schema | IntotoV002Schema;
+/**
+ * Schema for intoto object
+ */
+export interface IntotoV001Schema {
+    content: {
+        /**
+         * envelope
+         */
+        envelope?: string;
+        /**
+         * Specifies the hash algorithm and value encompassing the entire signed envelope
+         */
+        hash?: {
+            /**
+             * The hashing function used to compute the hash value
+             */
+            algorithm: "sha256";
+            /**
+             * The hash value for the archive
+             */
+            value: string;
+        };
+        /**
+         * Specifies the hash algorithm and value covering the payload within the DSSE envelope
+         */
+        payloadHash?: {
+            /**
+             * The hashing function used to compute the hash value
+             */
+            algorithm: "sha256";
+            /**
+             * The hash value for the envelope's payload
+             */
+            value: string;
+        };
+    };
+    /**
+     * The public key that can verify the signature
+     */
+    publicKey: string;
+}
+/**
+ * Schema for intoto object
+ */
+export interface IntotoV002Schema {
+    content: {
+        /**
+         * dsse envelope
+         */
+        envelope?: {
+            /**
+             * payload of the envelope
+             */
+            payload?: string;
+            /**
+             * type describing the payload
+             */
+            payloadType: string;
+            /**
+             * collection of all signatures of the envelope's payload
+             *
+             * @minItems 1
+             */
+            signatures: [
+                {
+                    /**
+                     * optional id of the key used to create the signature
+                     */
+                    keyid?: string;
+                    /**
+                     * signature of the payload
+                     */
+                    sig?: string;
+                    /**
+                     * public key that corresponds to this signature
+                     */
+                    publicKey?: string;
+                },
+                ...{
+                    /**
+                     * optional id of the key used to create the signature
+                     */
+                    keyid?: string;
+                    /**
+                     * signature of the payload
+                     */
+                    sig?: string;
+                    /**
+                     * public key that corresponds to this signature
+                     */
+                    publicKey?: string;
+                }[]
+            ];
+        };
+        /**
+         * Specifies the hash algorithm and value encompassing the entire signed envelope
+         */
+        hash?: {
+            /**
+             * The hashing function used to compute the hash value
+             */
+            algorithm: "sha256";
+            /**
+             * The hash value for the archive
+             */
+            value: string;
+        };
+        /**
+         * Specifies the hash algorithm and value covering the payload within the DSSE envelope
+         */
+        payloadHash?: {
+            /**
+             * The hashing function used to compute the hash value
+             */
+            algorithm: "sha256";
+            /**
+             * The hash value of the payload
+             */
+            value: string;
+        };
+    };
+}
diff --git a/node_modules/sigstore/dist/tlog/types/__generated__/intoto.js b/node_modules/sigstore/dist/tlog/types/__generated__/intoto.js
new file mode 100644
index 0000000000000..5383a370094cd
--- /dev/null
+++ b/node_modules/sigstore/dist/tlog/types/__generated__/intoto.js
@@ -0,0 +1,8 @@
+"use strict";
+/* tslint:disable */
+/**
+ * This file was automatically generated by json-schema-to-typescript.
+ * DO NOT MODIFY IT BY HAND. Instead, modify the source JSONSchema file,
+ * and run json-schema-to-typescript to regenerate this file.
+ */
+Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/node_modules/sigstore/dist/tlog/types/index.d.ts b/node_modules/sigstore/dist/tlog/types/index.d.ts
new file mode 100644
index 0000000000000..06be133af5fcf
--- /dev/null
+++ b/node_modules/sigstore/dist/tlog/types/index.d.ts
@@ -0,0 +1,38 @@
+import { HashedRekorV001Schema } from './__generated__/hashedrekord';
+import { IntotoV001Schema, IntotoV002Schema } from './__generated__/intoto';
+export declare const INTOTO_KIND = "intoto";
+export declare const HASHEDREKORD_KIND = "hashedrekord";
+export type HashedRekordKind = {
+    apiVersion: '0.0.1';
+    kind: typeof HASHEDREKORD_KIND;
+    spec: HashedRekorV001Schema;
+};
+export type IntotoKind = {
+    apiVersion: '0.0.1';
+    kind: typeof INTOTO_KIND;
+    spec: IntotoV001Schema;
+} | {
+    apiVersion: '0.0.2';
+    kind: typeof INTOTO_KIND;
+    spec: IntotoV002Schema;
+};
+export type EntryKind = HashedRekordKind | IntotoKind;
+export interface Entry {
+    uuid: string;
+    body: string;
+    integratedTime: number;
+    logID: string;
+    logIndex: number;
+    verification: EntryVerification;
+    attestation?: object;
+}
+export interface EntryVerification {
+    inclusionProof: InclusionProof;
+    signedEntryTimestamp: string;
+}
+export interface InclusionProof {
+    hashes: string[];
+    logIndex: number;
+    rootHash: string;
+    treeSize: number;
+}
diff --git a/node_modules/sigstore/dist/tlog/types/index.js b/node_modules/sigstore/dist/tlog/types/index.js
new file mode 100644
index 0000000000000..d6394a95c8397
--- /dev/null
+++ b/node_modules/sigstore/dist/tlog/types/index.js
@@ -0,0 +1,5 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.HASHEDREKORD_KIND = exports.INTOTO_KIND = void 0;
+exports.INTOTO_KIND = 'intoto';
+exports.HASHEDREKORD_KIND = 'hashedrekord';
diff --git a/node_modules/sigstore/dist/tlog/verify/body.d.ts b/node_modules/sigstore/dist/tlog/verify/body.d.ts
new file mode 100644
index 0000000000000..17de4f5c9698a
--- /dev/null
+++ b/node_modules/sigstore/dist/tlog/verify/body.d.ts
@@ -0,0 +1,2 @@
+import * as sigstore from '../../types/sigstore';
+export declare function verifyTLogBody(entry: sigstore.VerifiableTransparencyLogEntry, bundleContent: sigstore.Bundle['content']): boolean;
diff --git a/node_modules/sigstore/dist/tlog/verify/body.js b/node_modules/sigstore/dist/tlog/verify/body.js
new file mode 100644
index 0000000000000..086e068a30dcb
--- /dev/null
+++ b/node_modules/sigstore/dist/tlog/verify/body.js
@@ -0,0 +1,113 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.verifyTLogBody = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const error_1 = require("../../error");
+const util_1 = require("../../util");
+const TLOG_MISMATCH_ERROR_MSG = 'bundle content and tlog entry do not match';
+// Compare the given tlog entry to the given bundle
+function verifyTLogBody(entry, bundleContent) {
+    const { kind, version } = entry.kindVersion;
+    const body = JSON.parse(entry.canonicalizedBody.toString('utf8'));
+    try {
+        if (kind !== body.kind || version !== body.apiVersion) {
+            throw new error_1.VerificationError(TLOG_MISMATCH_ERROR_MSG);
+        }
+        switch (body.kind) {
+            case 'intoto':
+                verifyIntotoTLogBody(body, bundleContent);
+                break;
+            case 'hashedrekord':
+                verifyHashedRekordTLogBody(body, bundleContent);
+                break;
+            default:
+                throw new error_1.VerificationError(`unsupported kind in tlog entry: ${kind}`);
+        }
+        return true;
+    }
+    catch (e) {
+        return false;
+    }
+}
+exports.verifyTLogBody = verifyTLogBody;
+// Compare the given intoto tlog entry to the given bundle
+function verifyIntotoTLogBody(tlogEntry, content) {
+    if (content?.$case !== 'dsseEnvelope') {
+        throw new error_1.VerificationError(`unsupported bundle content: ${content?.$case || 'unknown'}`);
+    }
+    const dsse = content.dsseEnvelope;
+    switch (tlogEntry.apiVersion) {
+        case '0.0.2':
+            verifyIntoto002TLogBody(tlogEntry, dsse);
+            break;
+        default:
+            throw new error_1.VerificationError(`unsupported intoto version: ${tlogEntry.apiVersion}`);
+    }
+}
+// Compare the given hashedrekord tlog entry to the given bundle
+function verifyHashedRekordTLogBody(tlogEntry, content) {
+    if (content?.$case !== 'messageSignature') {
+        throw new error_1.VerificationError(`unsupported bundle content: ${content?.$case || 'unknown'}`);
+    }
+    const messageSignature = content.messageSignature;
+    switch (tlogEntry.apiVersion) {
+        case '0.0.1':
+            verifyHashedrekor001TLogBody(tlogEntry, messageSignature);
+            break;
+        default:
+            throw new error_1.VerificationError(`unsupported hashedrekord version: ${tlogEntry.apiVersion}`);
+    }
+}
+// Compare the given intoto v0.0.2 tlog entry to the given DSSE envelope.
+function verifyIntoto002TLogBody(tlogEntry, dsse) {
+    // Collect all of the signatures from the DSSE envelope
+    // Turns them into base64-encoded strings for comparison
+    const dsseSigs = dsse.signatures.map((signature) => signature.sig.toString('base64'));
+    // Collect all of the signatures from the tlog entry
+    // Remember that tlog signatures are double base64-encoded
+    const tlogSigs = tlogEntry.spec.content.envelope?.signatures.map((signature) => (signature.sig ? util_1.encoding.base64Decode(signature.sig) : ''));
+    // Ensure the bundle's DSSE and the tlog entry contain the same number of signatures
+    if (dsseSigs.length !== tlogSigs?.length) {
+        throw new error_1.VerificationError(TLOG_MISMATCH_ERROR_MSG);
+    }
+    // Ensure that every signature in the bundle's DSSE is present in the tlog entry
+    if (!dsseSigs.every((dsseSig) => tlogSigs.includes(dsseSig))) {
+        throw new error_1.VerificationError(TLOG_MISMATCH_ERROR_MSG);
+    }
+    // Ensure the digest of the bundle's DSSE payload matches the digest in the
+    // tlog entry
+    const dssePayloadHash = util_1.crypto.hash(dsse.payload).toString('hex');
+    if (dssePayloadHash !== tlogEntry.spec.content.payloadHash?.value) {
+        throw new error_1.VerificationError(TLOG_MISMATCH_ERROR_MSG);
+    }
+}
+// Compare the given hashedrekord v0.0.1 tlog entry to the given message
+// signature
+function verifyHashedrekor001TLogBody(tlogEntry, messageSignature) {
+    // Ensure that the bundles message signature matches the tlog entry
+    const msgSig = messageSignature.signature.toString('base64');
+    const tlogSig = tlogEntry.spec.signature.content;
+    if (msgSig !== tlogSig) {
+        throw new error_1.VerificationError(TLOG_MISMATCH_ERROR_MSG);
+    }
+    // Ensure that the bundle's message digest matches the tlog entry
+    const msgDigest = messageSignature.messageDigest?.digest.toString('hex');
+    const tlogDigest = tlogEntry.spec.data.hash?.value;
+    if (msgDigest !== tlogDigest) {
+        throw new error_1.VerificationError(TLOG_MISMATCH_ERROR_MSG);
+    }
+}
diff --git a/node_modules/sigstore/dist/tlog/verify/index.d.ts b/node_modules/sigstore/dist/tlog/verify/index.d.ts
new file mode 100644
index 0000000000000..8ab42760389ad
--- /dev/null
+++ b/node_modules/sigstore/dist/tlog/verify/index.d.ts
@@ -0,0 +1,2 @@
+import * as sigstore from '../../types/sigstore';
+export declare function verifyTLogEntries(bundle: sigstore.BundleWithVerificationMaterial, trustedRoot: sigstore.TrustedRoot, options: sigstore.ArtifactVerificationOptions_TlogOptions): void;
diff --git a/node_modules/sigstore/dist/tlog/verify/index.js b/node_modules/sigstore/dist/tlog/verify/index.js
new file mode 100644
index 0000000000000..ad655b643e109
--- /dev/null
+++ b/node_modules/sigstore/dist/tlog/verify/index.js
@@ -0,0 +1,75 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.verifyTLogEntries = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const error_1 = require("../../error");
+const sigstore = __importStar(require("../../types/sigstore"));
+const body_1 = require("./body");
+const set_1 = require("./set");
+// Verifies that the number of tlog entries that pass offline verification
+// is greater than or equal to the threshold specified in the options.
+function verifyTLogEntries(bundle, trustedRoot, options) {
+    if (options.performOnlineVerification) {
+        throw new error_1.VerificationError('Online verification not implemented');
+    }
+    // Extract the signing cert, if available
+    const signingCert = sigstore.signingCertificate(bundle);
+    // Iterate over the tlog entries and verify each one
+    const verifiedEntries = bundle.verificationMaterial.tlogEntries.filter((entry) => verifyTLogEntryOffline(entry, bundle.content, trustedRoot.tlogs, signingCert));
+    if (verifiedEntries.length < options.threshold) {
+        throw new error_1.VerificationError('tlog verification failed');
+    }
+}
+exports.verifyTLogEntries = verifyTLogEntries;
+function verifyTLogEntryOffline(entry, bundleContent, tlogs, signingCert) {
+    // Check that the TLog entry has the fields necessary for verification
+    if (!sigstore.isVerifiableTransparencyLogEntry(entry)) {
+        return false;
+    }
+    // If there is a signing certificate available, check that the tlog integrated
+    // time is within the certificate's validity period; otherwise, skip this
+    // check.
+    const verifyTLogIntegrationTime = signingCert
+        ? () => signingCert.validForDate(new Date(Number(entry.integratedTime) * 1000))
+        : () => true;
+    return ((0, body_1.verifyTLogBody)(entry, bundleContent) &&
+        (0, set_1.verifyTLogSET)(entry, tlogs) &&
+        verifyTLogIntegrationTime());
+}
diff --git a/node_modules/sigstore/dist/tlog/verify/set.d.ts b/node_modules/sigstore/dist/tlog/verify/set.d.ts
new file mode 100644
index 0000000000000..278317489a7e4
--- /dev/null
+++ b/node_modules/sigstore/dist/tlog/verify/set.d.ts
@@ -0,0 +1,2 @@
+import * as sigstore from '../../types/sigstore';
+export declare function verifyTLogSET(entry: sigstore.VerifiableTransparencyLogEntry, tlogs: sigstore.TransparencyLogInstance[]): boolean;
diff --git a/node_modules/sigstore/dist/tlog/verify/set.js b/node_modules/sigstore/dist/tlog/verify/set.js
new file mode 100644
index 0000000000000..89a544283d73d
--- /dev/null
+++ b/node_modules/sigstore/dist/tlog/verify/set.js
@@ -0,0 +1,67 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.verifyTLogSET = void 0;
+const util_1 = require("../../util");
+// Verifies the SET for the given entry against the list of trusted
+// transparency logs. Returns true if the SET can be verified against at least
+// one of the trusted logs; otherwise, returns false.
+function verifyTLogSET(entry, tlogs) {
+    // Filter the list of tlog instances to only those which might be able to
+    // verify the SET
+    const validTLogs = filterTLogInstances(tlogs, entry.logId.keyId, entry.integratedTime);
+    // Check to see if we can verify the SET against any of the valid tlogs
+    return validTLogs.some((tlog) => {
+        if (!tlog.publicKey?.rawBytes) {
+            return false;
+        }
+        const publicKey = util_1.crypto.createPublicKey(tlog.publicKey.rawBytes);
+        // Re-create the original Rekor verification payload
+        const payload = toVerificationPayload(entry);
+        // Canonicalize the payload and turn into a buffer for verification
+        const data = Buffer.from(util_1.json.canonicalize(payload), 'utf8');
+        // Extract the SET from the tlog entry
+        const signature = entry.inclusionPromise.signedEntryTimestamp;
+        return util_1.crypto.verifyBlob(data, publicKey, signature);
+    });
+}
+exports.verifyTLogSET = verifyTLogSET;
+// Returns a properly formatted "VerificationPayload" for one of the
+// transparency log entries in the given bundle which can be used for SET
+// verification.
+function toVerificationPayload(entry) {
+    const { integratedTime, logIndex, logId, canonicalizedBody } = entry;
+    return {
+        body: canonicalizedBody.toString('base64'),
+        integratedTime: Number(integratedTime),
+        logIndex: Number(logIndex),
+        logID: logId.keyId.toString('hex'),
+    };
+}
+// Filter the list of tlog instances to only those which match the given log
+// ID and have public keys which are valid for the given integrated time.
+function filterTLogInstances(tlogInstances, logID, integratedTime) {
+    const targetDate = new Date(Number(integratedTime) * 1000);
+    return tlogInstances.filter((tlog) => {
+        // If the log IDs don't match, we can't use this tlog
+        if (!tlog.logId?.keyId.equals(logID)) {
+            return false;
+        }
+        // If the tlog doesn't have a public key, we can't use it
+        const publicKey = tlog.publicKey;
+        if (publicKey === undefined) {
+            return false;
+        }
+        // If the tlog doesn't have a rawBytes field, we can't use it
+        if (publicKey.rawBytes === undefined) {
+            return false;
+        }
+        // If the tlog doesn't have a validFor field, we don't need to check it
+        if (publicKey.validFor === undefined) {
+            return true;
+        }
+        // Check that the integrated time is within the validFor range
+        return (publicKey.validFor.start &&
+            publicKey.validFor.start <= targetDate &&
+            (!publicKey.validFor.end || targetDate <= publicKey.validFor.end));
+    });
+}
diff --git a/node_modules/sigstore/dist/tuf/index.d.ts b/node_modules/sigstore/dist/tuf/index.d.ts
new file mode 100644
index 0000000000000..349ff08b3be4b
--- /dev/null
+++ b/node_modules/sigstore/dist/tuf/index.d.ts
@@ -0,0 +1,2 @@
+import * as sigstore from '../types/sigstore';
+export declare function getTrustedRoot(cacheDir: string): Promise<sigstore.TrustedRoot>;
diff --git a/node_modules/sigstore/dist/tuf/index.js b/node_modules/sigstore/dist/tuf/index.js
new file mode 100644
index 0000000000000..1aea238ef32ff
--- /dev/null
+++ b/node_modules/sigstore/dist/tuf/index.js
@@ -0,0 +1,76 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.getTrustedRoot = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const fs_1 = __importDefault(require("fs"));
+const path_1 = __importDefault(require("path"));
+const tuf_js_1 = require("tuf-js");
+const trustroot_1 = require("./trustroot");
+async function getTrustedRoot(cacheDir) {
+    initTufCache(cacheDir);
+    const repoMap = initRepoMap(cacheDir);
+    const repoClients = Object.entries(repoMap.repositories).map(([name, urls]) => initClient(name, urls[0], cacheDir));
+    // TODO: Add support for multiple repositories. For now, we just use the first
+    // one (the production Sigstore TUF repository).
+    const fetcher = new trustroot_1.TrustedRootFetcher(repoClients[0]);
+    return fetcher.getTrustedRoot();
+}
+exports.getTrustedRoot = getTrustedRoot;
+// Initializes the root TUF cache directory
+function initTufCache(cacheDir) {
+    if (!fs_1.default.existsSync(cacheDir)) {
+        fs_1.default.mkdirSync(cacheDir, { recursive: true });
+    }
+}
+// Initializes the repo map (copying it to the cache root dir) and returns the
+// content of the repository map.
+function initRepoMap(rootDir) {
+    const mapDest = path_1.default.join(rootDir, 'map.json');
+    if (!fs_1.default.existsSync(mapDest)) {
+        const mapSrc = require.resolve('../../store/map.json');
+        fs_1.default.copyFileSync(mapSrc, mapDest);
+    }
+    const buf = fs_1.default.readFileSync(mapDest);
+    return JSON.parse(buf.toString('utf-8'));
+}
+function initClient(name, url, rootDir) {
+    const repoCachePath = path_1.default.join(rootDir, name);
+    const targetCachePath = path_1.default.join(repoCachePath, 'targets');
+    const tufRootDest = path_1.default.join(repoCachePath, 'root.json');
+    // Only copy the TUF trusted root if it doesn't already exist. It's possible
+    // that the cached root has already been updated, so we don't want to roll it
+    // back.
+    if (!fs_1.default.existsSync(tufRootDest)) {
+        const tufRootSrc = require.resolve(`../../store/${name}-root.json`);
+        fs_1.default.mkdirSync(repoCachePath);
+        fs_1.default.copyFileSync(tufRootSrc, tufRootDest);
+    }
+    if (!fs_1.default.existsSync(targetCachePath)) {
+        fs_1.default.mkdirSync(targetCachePath);
+    }
+    // TODO: Is there some better way to derive the base URL for the targets?
+    // Hard-coding for now based on current Sigstore TUF repo layout.
+    return new tuf_js_1.Updater({
+        metadataBaseUrl: url,
+        targetBaseUrl: `${url}/targets`,
+        metadataDir: repoCachePath,
+        targetDir: targetCachePath,
+    });
+}
diff --git a/node_modules/sigstore/dist/tuf/trustroot.d.ts b/node_modules/sigstore/dist/tuf/trustroot.d.ts
new file mode 100644
index 0000000000000..615fffae62a80
--- /dev/null
+++ b/node_modules/sigstore/dist/tuf/trustroot.d.ts
@@ -0,0 +1,11 @@
+import { Updater } from 'tuf-js';
+import * as sigstore from '../types/sigstore';
+export declare class TrustedRootFetcher {
+    private tuf;
+    constructor(tuf: Updater);
+    getTrustedRoot(): Promise<sigstore.TrustedRoot>;
+    private allTargets;
+    private getTLogKeys;
+    private getCAKeys;
+    private readTargetBytes;
+}
diff --git a/node_modules/sigstore/dist/tuf/trustroot.js b/node_modules/sigstore/dist/tuf/trustroot.js
new file mode 100644
index 0000000000000..dcf491cdaefe8
--- /dev/null
+++ b/node_modules/sigstore/dist/tuf/trustroot.js
@@ -0,0 +1,163 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TrustedRootFetcher = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const fs_1 = __importDefault(require("fs"));
+const error_1 = require("../error");
+const sigstore = __importStar(require("../types/sigstore"));
+const util_1 = require("../util");
+const TRUSTED_ROOT_MEDIA_TYPE = 'application/vnd.dev.sigstore.trustedroot+json;version=0.1';
+// Type guard for SigstoreTargetMetadata
+function isTargetMetadata(m) {
+    return (m !== undefined &&
+        m !== null &&
+        typeof m === 'object' &&
+        'status' in m &&
+        'usage' in m &&
+        'uri' in m);
+}
+class TrustedRootFetcher {
+    constructor(tuf) {
+        this.tuf = tuf;
+    }
+    // Assembles a TrustedRoot from the targets in the TUF repo
+    async getTrustedRoot() {
+        // Get all available targets
+        const targets = await this.allTargets();
+        const cas = await this.getCAKeys(targets, 'Fulcio');
+        const ctlogs = await this.getTLogKeys(targets, 'CTFE');
+        const tlogs = await this.getTLogKeys(targets, 'Rekor');
+        return {
+            mediaType: TRUSTED_ROOT_MEDIA_TYPE,
+            certificateAuthorities: cas,
+            ctlogs: ctlogs,
+            tlogs: tlogs,
+            timestampAuthorities: [],
+        };
+    }
+    // Retrieves the list of TUF targets.
+    // NOTE: This is a HACK to get around the fact that the TUF library doesn't
+    // expose the list of targets. This is a temporary solution until TUF comes up
+    // with a story for target discovery.
+    // https://docs.google.com/document/d/1rWHAM2qCUtnjWD4lOrGWE2EIDLoA7eSy4-jB66Wgh0o
+    async allTargets() {
+        try {
+            await this.tuf.refresh();
+        }
+        catch (e) {
+            throw new error_1.InternalError('error refreshing trust metadata');
+        }
+        return Object.values(
+        // eslint-disable-next-line @typescript-eslint/no-explicit-any
+        this.tuf.trustedSet.targets?.signed.targets || {});
+    }
+    // Filters the supplied list of targets to those with the specified usage
+    // and returns a new TransparencyLogInstance for each with the associated
+    // public key populated.
+    async getTLogKeys(targets, usage) {
+        const filteredTargets = filterByUsage(targets, usage);
+        return Promise.all(filteredTargets.map(async (target) => {
+            const keyBytes = await this.readTargetBytes(target);
+            const uri = isTargetMetadata(target.custom.sigstore)
+                ? target.custom.sigstore.uri
+                : '';
+            // The log ID is not present in the Sigstore target metadata, but
+            // can be derived by hashing the contents of the public key.
+            return {
+                baseUrl: uri,
+                hashAlgorithm: sigstore.HashAlgorithm.SHA2_256,
+                logId: { keyId: util_1.crypto.hash(keyBytes) },
+                publicKey: {
+                    keyDetails: sigstore.PublicKeyDetails.PKIX_ECDSA_P256_SHA_256,
+                    rawBytes: keyBytes,
+                },
+            };
+        }));
+    }
+    // Filters the supplied list of targets to those with the specified usage
+    // and returns a new CertificateAuthority populated with all of the associated
+    // certificates.
+    // NOTE: The Sigstore target metadata does NOT provide any mechanism to link
+    // related certificates (e.g. a root and intermediate). As a result, we
+    // assume that all certificates located here are part of the same chain.
+    // This works out OK since our certificate chain verification code tries all
+    // possible permutations of the certificates until it finds one that results
+    // in a valid, trusted chain.
+    async getCAKeys(targets, usage) {
+        const filteredTargets = filterByUsage(targets, usage);
+        const certs = await Promise.all(filteredTargets.map(async (target) => await this.readTargetBytes(target)));
+        return [
+            {
+                uri: '',
+                subject: undefined,
+                validFor: { start: new Date(0) },
+                certChain: {
+                    certificates: certs.map((cert) => ({ rawBytes: cert })),
+                },
+            },
+        ];
+    }
+    // Reads the contents of the specified target file as a DER-encoded buffer.
+    async readTargetBytes(target) {
+        try {
+            let path = await this.tuf.findCachedTarget(target);
+            // An empty path here means the target has not been cached locally, or is
+            // out of date. In either case, we need to download it.
+            if (!path) {
+                path = await this.tuf.downloadTarget(target);
+            }
+            const file = fs_1.default.readFileSync(path);
+            return util_1.pem.toDER(file.toString('utf-8'));
+        }
+        catch (err) {
+            throw new error_1.InternalError(`error reading key/certificate for ${target.path}`);
+        }
+    }
+}
+exports.TrustedRootFetcher = TrustedRootFetcher;
+function filterByUsage(targets, usage) {
+    return targets.filter((target) => {
+        const meta = target.custom.sigstore;
+        return isTargetMetadata(meta) && meta.usage === usage;
+    });
+}
diff --git a/node_modules/sigstore/dist/types/signature.d.ts b/node_modules/sigstore/dist/types/signature.d.ts
new file mode 100644
index 0000000000000..40b4fbe6339ca
--- /dev/null
+++ b/node_modules/sigstore/dist/types/signature.d.ts
@@ -0,0 +1,16 @@
+/// <reference types="node" />
+import { Envelope } from './sigstore';
+import { OneOf } from './utility';
+interface VerificationMaterial {
+    certificates: string[];
+    key: {
+        id?: string;
+        value: string;
+    };
+}
+export type SignatureMaterial = {
+    signature: Buffer;
+} & OneOf<VerificationMaterial>;
+export type SignerFunc = (payload: Buffer) => Promise<SignatureMaterial>;
+export declare function extractSignatureMaterial(dsseEnvelope: Envelope, publicKey: string): SignatureMaterial;
+export {};
diff --git a/node_modules/sigstore/dist/types/signature.js b/node_modules/sigstore/dist/types/signature.js
new file mode 100644
index 0000000000000..339e2a2731b41
--- /dev/null
+++ b/node_modules/sigstore/dist/types/signature.js
@@ -0,0 +1,15 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.extractSignatureMaterial = void 0;
+function extractSignatureMaterial(dsseEnvelope, publicKey) {
+    const signature = dsseEnvelope.signatures[0];
+    return {
+        signature: signature.sig,
+        key: {
+            id: signature.keyid,
+            value: publicKey,
+        },
+        certificates: undefined,
+    };
+}
+exports.extractSignatureMaterial = extractSignatureMaterial;
diff --git a/node_modules/sigstore/dist/types/sigstore/__generated__/envelope.d.ts b/node_modules/sigstore/dist/types/sigstore/__generated__/envelope.d.ts
new file mode 100644
index 0000000000000..81422a0075962
--- /dev/null
+++ b/node_modules/sigstore/dist/types/sigstore/__generated__/envelope.d.ts
@@ -0,0 +1,46 @@
+/// <reference types="node" />
+/** An authenticated message of arbitrary type. */
+export interface Envelope {
+    /**
+     * Message to be signed. (In JSON, this is encoded as base64.)
+     * REQUIRED.
+     */
+    payload: Buffer;
+    /**
+     * String unambiguously identifying how to interpret payload.
+     * REQUIRED.
+     */
+    payloadType: string;
+    /**
+     * Signature over:
+     *     PAE(type, body)
+     * Where PAE is defined as:
+     * PAE(type, body) = "DSSEv1" + SP + LEN(type) + SP + type + SP + LEN(body) + SP + body
+     * +               = concatenation
+     * SP              = ASCII space [0x20]
+     * "DSSEv1"        = ASCII [0x44, 0x53, 0x53, 0x45, 0x76, 0x31]
+     * LEN(s)          = ASCII decimal encoding of the byte length of s, with no leading zeros
+     * REQUIRED (length >= 1).
+     */
+    signatures: Signature[];
+}
+export interface Signature {
+    /**
+     * Signature itself. (In JSON, this is encoded as base64.)
+     * REQUIRED.
+     */
+    sig: Buffer;
+    /**
+     * Unauthenticated* hint identifying which public key was used.
+     * OPTIONAL.
+     */
+    keyid: string;
+}
+export declare const Envelope: {
+    fromJSON(object: any): Envelope;
+    toJSON(message: Envelope): unknown;
+};
+export declare const Signature: {
+    fromJSON(object: any): Signature;
+    toJSON(message: Signature): unknown;
+};
diff --git a/node_modules/sigstore/dist/types/sigstore/__generated__/envelope.js b/node_modules/sigstore/dist/types/sigstore/__generated__/envelope.js
new file mode 100644
index 0000000000000..715bb1aa5b57d
--- /dev/null
+++ b/node_modules/sigstore/dist/types/sigstore/__generated__/envelope.js
@@ -0,0 +1,89 @@
+"use strict";
+/* eslint-disable */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Signature = exports.Envelope = void 0;
+function createBaseEnvelope() {
+    return { payload: Buffer.alloc(0), payloadType: "", signatures: [] };
+}
+exports.Envelope = {
+    fromJSON(object) {
+        return {
+            payload: isSet(object.payload) ? Buffer.from(bytesFromBase64(object.payload)) : Buffer.alloc(0),
+            payloadType: isSet(object.payloadType) ? String(object.payloadType) : "",
+            signatures: Array.isArray(object?.signatures) ? object.signatures.map((e) => exports.Signature.fromJSON(e)) : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.payload !== undefined &&
+            (obj.payload = base64FromBytes(message.payload !== undefined ? message.payload : Buffer.alloc(0)));
+        message.payloadType !== undefined && (obj.payloadType = message.payloadType);
+        if (message.signatures) {
+            obj.signatures = message.signatures.map((e) => e ? exports.Signature.toJSON(e) : undefined);
+        }
+        else {
+            obj.signatures = [];
+        }
+        return obj;
+    },
+};
+function createBaseSignature() {
+    return { sig: Buffer.alloc(0), keyid: "" };
+}
+exports.Signature = {
+    fromJSON(object) {
+        return {
+            sig: isSet(object.sig) ? Buffer.from(bytesFromBase64(object.sig)) : Buffer.alloc(0),
+            keyid: isSet(object.keyid) ? String(object.keyid) : "",
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.sig !== undefined && (obj.sig = base64FromBytes(message.sig !== undefined ? message.sig : Buffer.alloc(0)));
+        message.keyid !== undefined && (obj.keyid = message.keyid);
+        return obj;
+    },
+};
+var globalThis = (() => {
+    if (typeof globalThis !== "undefined") {
+        return globalThis;
+    }
+    if (typeof self !== "undefined") {
+        return self;
+    }
+    if (typeof window !== "undefined") {
+        return window;
+    }
+    if (typeof global !== "undefined") {
+        return global;
+    }
+    throw "Unable to locate global object";
+})();
+function bytesFromBase64(b64) {
+    if (globalThis.Buffer) {
+        return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
+    }
+    else {
+        const bin = globalThis.atob(b64);
+        const arr = new Uint8Array(bin.length);
+        for (let i = 0; i < bin.length; ++i) {
+            arr[i] = bin.charCodeAt(i);
+        }
+        return arr;
+    }
+}
+function base64FromBytes(arr) {
+    if (globalThis.Buffer) {
+        return globalThis.Buffer.from(arr).toString("base64");
+    }
+    else {
+        const bin = [];
+        arr.forEach((byte) => {
+            bin.push(String.fromCharCode(byte));
+        });
+        return globalThis.btoa(bin.join(""));
+    }
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/sigstore/dist/types/sigstore/__generated__/google/api/field_behavior.d.ts b/node_modules/sigstore/dist/types/sigstore/__generated__/google/api/field_behavior.d.ts
new file mode 100644
index 0000000000000..1b4ed47aadebc
--- /dev/null
+++ b/node_modules/sigstore/dist/types/sigstore/__generated__/google/api/field_behavior.d.ts
@@ -0,0 +1,52 @@
+/**
+ * An indicator of the behavior of a given field (for example, that a field
+ * is required in requests, or given as output but ignored as input).
+ * This **does not** change the behavior in protocol buffers itself; it only
+ * denotes the behavior and may affect how API tooling handles the field.
+ *
+ * Note: This enum **may** receive new values in the future.
+ */
+export declare enum FieldBehavior {
+    /** FIELD_BEHAVIOR_UNSPECIFIED - Conventional default for enums. Do not use this. */
+    FIELD_BEHAVIOR_UNSPECIFIED = 0,
+    /**
+     * OPTIONAL - Specifically denotes a field as optional.
+     * While all fields in protocol buffers are optional, this may be specified
+     * for emphasis if appropriate.
+     */
+    OPTIONAL = 1,
+    /**
+     * REQUIRED - Denotes a field as required.
+     * This indicates that the field **must** be provided as part of the request,
+     * and failure to do so will cause an error (usually `INVALID_ARGUMENT`).
+     */
+    REQUIRED = 2,
+    /**
+     * OUTPUT_ONLY - Denotes a field as output only.
+     * This indicates that the field is provided in responses, but including the
+     * field in a request does nothing (the server *must* ignore it and
+     * *must not* throw an error as a result of the field's presence).
+     */
+    OUTPUT_ONLY = 3,
+    /**
+     * INPUT_ONLY - Denotes a field as input only.
+     * This indicates that the field is provided in requests, and the
+     * corresponding field is not included in output.
+     */
+    INPUT_ONLY = 4,
+    /**
+     * IMMUTABLE - Denotes a field as immutable.
+     * This indicates that the field may be set once in a request to create a
+     * resource, but may not be changed thereafter.
+     */
+    IMMUTABLE = 5,
+    /**
+     * UNORDERED_LIST - Denotes that a (repeated) field is an unordered list.
+     * This indicates that the service may provide the elements of the list
+     * in any arbitrary order, rather than the order the user originally
+     * provided. Additionally, the list's order may or may not be stable.
+     */
+    UNORDERED_LIST = 6
+}
+export declare function fieldBehaviorFromJSON(object: any): FieldBehavior;
+export declare function fieldBehaviorToJSON(object: FieldBehavior): string;
diff --git a/node_modules/sigstore/dist/types/sigstore/__generated__/google/api/field_behavior.js b/node_modules/sigstore/dist/types/sigstore/__generated__/google/api/field_behavior.js
new file mode 100644
index 0000000000000..f9b57cccdc3d3
--- /dev/null
+++ b/node_modules/sigstore/dist/types/sigstore/__generated__/google/api/field_behavior.js
@@ -0,0 +1,119 @@
+"use strict";
+/* eslint-disable */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.fieldBehaviorToJSON = exports.fieldBehaviorFromJSON = exports.FieldBehavior = void 0;
+/**
+ * An indicator of the behavior of a given field (for example, that a field
+ * is required in requests, or given as output but ignored as input).
+ * This **does not** change the behavior in protocol buffers itself; it only
+ * denotes the behavior and may affect how API tooling handles the field.
+ *
+ * Note: This enum **may** receive new values in the future.
+ */
+var FieldBehavior;
+(function (FieldBehavior) {
+    /** FIELD_BEHAVIOR_UNSPECIFIED - Conventional default for enums. Do not use this. */
+    FieldBehavior[FieldBehavior["FIELD_BEHAVIOR_UNSPECIFIED"] = 0] = "FIELD_BEHAVIOR_UNSPECIFIED";
+    /**
+     * OPTIONAL - Specifically denotes a field as optional.
+     * While all fields in protocol buffers are optional, this may be specified
+     * for emphasis if appropriate.
+     */
+    FieldBehavior[FieldBehavior["OPTIONAL"] = 1] = "OPTIONAL";
+    /**
+     * REQUIRED - Denotes a field as required.
+     * This indicates that the field **must** be provided as part of the request,
+     * and failure to do so will cause an error (usually `INVALID_ARGUMENT`).
+     */
+    FieldBehavior[FieldBehavior["REQUIRED"] = 2] = "REQUIRED";
+    /**
+     * OUTPUT_ONLY - Denotes a field as output only.
+     * This indicates that the field is provided in responses, but including the
+     * field in a request does nothing (the server *must* ignore it and
+     * *must not* throw an error as a result of the field's presence).
+     */
+    FieldBehavior[FieldBehavior["OUTPUT_ONLY"] = 3] = "OUTPUT_ONLY";
+    /**
+     * INPUT_ONLY - Denotes a field as input only.
+     * This indicates that the field is provided in requests, and the
+     * corresponding field is not included in output.
+     */
+    FieldBehavior[FieldBehavior["INPUT_ONLY"] = 4] = "INPUT_ONLY";
+    /**
+     * IMMUTABLE - Denotes a field as immutable.
+     * This indicates that the field may be set once in a request to create a
+     * resource, but may not be changed thereafter.
+     */
+    FieldBehavior[FieldBehavior["IMMUTABLE"] = 5] = "IMMUTABLE";
+    /**
+     * UNORDERED_LIST - Denotes that a (repeated) field is an unordered list.
+     * This indicates that the service may provide the elements of the list
+     * in any arbitrary order, rather than the order the user originally
+     * provided. Additionally, the list's order may or may not be stable.
+     */
+    FieldBehavior[FieldBehavior["UNORDERED_LIST"] = 6] = "UNORDERED_LIST";
+})(FieldBehavior = exports.FieldBehavior || (exports.FieldBehavior = {}));
+function fieldBehaviorFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "FIELD_BEHAVIOR_UNSPECIFIED":
+            return FieldBehavior.FIELD_BEHAVIOR_UNSPECIFIED;
+        case 1:
+        case "OPTIONAL":
+            return FieldBehavior.OPTIONAL;
+        case 2:
+        case "REQUIRED":
+            return FieldBehavior.REQUIRED;
+        case 3:
+        case "OUTPUT_ONLY":
+            return FieldBehavior.OUTPUT_ONLY;
+        case 4:
+        case "INPUT_ONLY":
+            return FieldBehavior.INPUT_ONLY;
+        case 5:
+        case "IMMUTABLE":
+            return FieldBehavior.IMMUTABLE;
+        case 6:
+        case "UNORDERED_LIST":
+            return FieldBehavior.UNORDERED_LIST;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior");
+    }
+}
+exports.fieldBehaviorFromJSON = fieldBehaviorFromJSON;
+function fieldBehaviorToJSON(object) {
+    switch (object) {
+        case FieldBehavior.FIELD_BEHAVIOR_UNSPECIFIED:
+            return "FIELD_BEHAVIOR_UNSPECIFIED";
+        case FieldBehavior.OPTIONAL:
+            return "OPTIONAL";
+        case FieldBehavior.REQUIRED:
+            return "REQUIRED";
+        case FieldBehavior.OUTPUT_ONLY:
+            return "OUTPUT_ONLY";
+        case FieldBehavior.INPUT_ONLY:
+            return "INPUT_ONLY";
+        case FieldBehavior.IMMUTABLE:
+            return "IMMUTABLE";
+        case FieldBehavior.UNORDERED_LIST:
+            return "UNORDERED_LIST";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior");
+    }
+}
+exports.fieldBehaviorToJSON = fieldBehaviorToJSON;
+var globalThis = (() => {
+    if (typeof globalThis !== "undefined") {
+        return globalThis;
+    }
+    if (typeof self !== "undefined") {
+        return self;
+    }
+    if (typeof window !== "undefined") {
+        return window;
+    }
+    if (typeof global !== "undefined") {
+        return global;
+    }
+    throw "Unable to locate global object";
+})();
diff --git a/node_modules/sigstore/dist/types/sigstore/__generated__/google/protobuf/descriptor.d.ts b/node_modules/sigstore/dist/types/sigstore/__generated__/google/protobuf/descriptor.d.ts
new file mode 100644
index 0000000000000..ef43bf01c10c3
--- /dev/null
+++ b/node_modules/sigstore/dist/types/sigstore/__generated__/google/protobuf/descriptor.d.ts
@@ -0,0 +1,939 @@
+/// <reference types="node" />
+/**
+ * The protocol compiler can output a FileDescriptorSet containing the .proto
+ * files it parses.
+ */
+export interface FileDescriptorSet {
+    file: FileDescriptorProto[];
+}
+/** Describes a complete .proto file. */
+export interface FileDescriptorProto {
+    /** file name, relative to root of source tree */
+    name: string;
+    /** e.g. "foo", "foo.bar", etc. */
+    package: string;
+    /** Names of files imported by this file. */
+    dependency: string[];
+    /** Indexes of the public imported files in the dependency list above. */
+    publicDependency: number[];
+    /**
+     * Indexes of the weak imported files in the dependency list.
+     * For Google-internal migration only. Do not use.
+     */
+    weakDependency: number[];
+    /** All top-level definitions in this file. */
+    messageType: DescriptorProto[];
+    enumType: EnumDescriptorProto[];
+    service: ServiceDescriptorProto[];
+    extension: FieldDescriptorProto[];
+    options: FileOptions | undefined;
+    /**
+     * This field contains optional information about the original source code.
+     * You may safely remove this entire field without harming runtime
+     * functionality of the descriptors -- the information is needed only by
+     * development tools.
+     */
+    sourceCodeInfo: SourceCodeInfo | undefined;
+    /**
+     * The syntax of the proto file.
+     * The supported values are "proto2" and "proto3".
+     */
+    syntax: string;
+}
+/** Describes a message type. */
+export interface DescriptorProto {
+    name: string;
+    field: FieldDescriptorProto[];
+    extension: FieldDescriptorProto[];
+    nestedType: DescriptorProto[];
+    enumType: EnumDescriptorProto[];
+    extensionRange: DescriptorProto_ExtensionRange[];
+    oneofDecl: OneofDescriptorProto[];
+    options: MessageOptions | undefined;
+    reservedRange: DescriptorProto_ReservedRange[];
+    /**
+     * Reserved field names, which may not be used by fields in the same message.
+     * A given name may only be reserved once.
+     */
+    reservedName: string[];
+}
+export interface DescriptorProto_ExtensionRange {
+    /** Inclusive. */
+    start: number;
+    /** Exclusive. */
+    end: number;
+    options: ExtensionRangeOptions | undefined;
+}
+/**
+ * Range of reserved tag numbers. Reserved tag numbers may not be used by
+ * fields or extension ranges in the same message. Reserved ranges may
+ * not overlap.
+ */
+export interface DescriptorProto_ReservedRange {
+    /** Inclusive. */
+    start: number;
+    /** Exclusive. */
+    end: number;
+}
+export interface ExtensionRangeOptions {
+    /** The parser stores options it doesn't recognize here. See above. */
+    uninterpretedOption: UninterpretedOption[];
+}
+/** Describes a field within a message. */
+export interface FieldDescriptorProto {
+    name: string;
+    number: number;
+    label: FieldDescriptorProto_Label;
+    /**
+     * If type_name is set, this need not be set.  If both this and type_name
+     * are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP.
+     */
+    type: FieldDescriptorProto_Type;
+    /**
+     * For message and enum types, this is the name of the type.  If the name
+     * starts with a '.', it is fully-qualified.  Otherwise, C++-like scoping
+     * rules are used to find the type (i.e. first the nested types within this
+     * message are searched, then within the parent, on up to the root
+     * namespace).
+     */
+    typeName: string;
+    /**
+     * For extensions, this is the name of the type being extended.  It is
+     * resolved in the same manner as type_name.
+     */
+    extendee: string;
+    /**
+     * For numeric types, contains the original text representation of the value.
+     * For booleans, "true" or "false".
+     * For strings, contains the default text contents (not escaped in any way).
+     * For bytes, contains the C escaped value.  All bytes >= 128 are escaped.
+     */
+    defaultValue: string;
+    /**
+     * If set, gives the index of a oneof in the containing type's oneof_decl
+     * list.  This field is a member of that oneof.
+     */
+    oneofIndex: number;
+    /**
+     * JSON name of this field. The value is set by protocol compiler. If the
+     * user has set a "json_name" option on this field, that option's value
+     * will be used. Otherwise, it's deduced from the field's name by converting
+     * it to camelCase.
+     */
+    jsonName: string;
+    options: FieldOptions | undefined;
+    /**
+     * If true, this is a proto3 "optional". When a proto3 field is optional, it
+     * tracks presence regardless of field type.
+     *
+     * When proto3_optional is true, this field must be belong to a oneof to
+     * signal to old proto3 clients that presence is tracked for this field. This
+     * oneof is known as a "synthetic" oneof, and this field must be its sole
+     * member (each proto3 optional field gets its own synthetic oneof). Synthetic
+     * oneofs exist in the descriptor only, and do not generate any API. Synthetic
+     * oneofs must be ordered after all "real" oneofs.
+     *
+     * For message fields, proto3_optional doesn't create any semantic change,
+     * since non-repeated message fields always track presence. However it still
+     * indicates the semantic detail of whether the user wrote "optional" or not.
+     * This can be useful for round-tripping the .proto file. For consistency we
+     * give message fields a synthetic oneof also, even though it is not required
+     * to track presence. This is especially important because the parser can't
+     * tell if a field is a message or an enum, so it must always create a
+     * synthetic oneof.
+     *
+     * Proto2 optional fields do not set this flag, because they already indicate
+     * optional with `LABEL_OPTIONAL`.
+     */
+    proto3Optional: boolean;
+}
+export declare enum FieldDescriptorProto_Type {
+    /**
+     * TYPE_DOUBLE - 0 is reserved for errors.
+     * Order is weird for historical reasons.
+     */
+    TYPE_DOUBLE = 1,
+    TYPE_FLOAT = 2,
+    /**
+     * TYPE_INT64 - Not ZigZag encoded.  Negative numbers take 10 bytes.  Use TYPE_SINT64 if
+     * negative values are likely.
+     */
+    TYPE_INT64 = 3,
+    TYPE_UINT64 = 4,
+    /**
+     * TYPE_INT32 - Not ZigZag encoded.  Negative numbers take 10 bytes.  Use TYPE_SINT32 if
+     * negative values are likely.
+     */
+    TYPE_INT32 = 5,
+    TYPE_FIXED64 = 6,
+    TYPE_FIXED32 = 7,
+    TYPE_BOOL = 8,
+    TYPE_STRING = 9,
+    /**
+     * TYPE_GROUP - Tag-delimited aggregate.
+     * Group type is deprecated and not supported in proto3. However, Proto3
+     * implementations should still be able to parse the group wire format and
+     * treat group fields as unknown fields.
+     */
+    TYPE_GROUP = 10,
+    /** TYPE_MESSAGE - Length-delimited aggregate. */
+    TYPE_MESSAGE = 11,
+    /** TYPE_BYTES - New in version 2. */
+    TYPE_BYTES = 12,
+    TYPE_UINT32 = 13,
+    TYPE_ENUM = 14,
+    TYPE_SFIXED32 = 15,
+    TYPE_SFIXED64 = 16,
+    /** TYPE_SINT32 - Uses ZigZag encoding. */
+    TYPE_SINT32 = 17,
+    /** TYPE_SINT64 - Uses ZigZag encoding. */
+    TYPE_SINT64 = 18
+}
+export declare function fieldDescriptorProto_TypeFromJSON(object: any): FieldDescriptorProto_Type;
+export declare function fieldDescriptorProto_TypeToJSON(object: FieldDescriptorProto_Type): string;
+export declare enum FieldDescriptorProto_Label {
+    /** LABEL_OPTIONAL - 0 is reserved for errors */
+    LABEL_OPTIONAL = 1,
+    LABEL_REQUIRED = 2,
+    LABEL_REPEATED = 3
+}
+export declare function fieldDescriptorProto_LabelFromJSON(object: any): FieldDescriptorProto_Label;
+export declare function fieldDescriptorProto_LabelToJSON(object: FieldDescriptorProto_Label): string;
+/** Describes a oneof. */
+export interface OneofDescriptorProto {
+    name: string;
+    options: OneofOptions | undefined;
+}
+/** Describes an enum type. */
+export interface EnumDescriptorProto {
+    name: string;
+    value: EnumValueDescriptorProto[];
+    options: EnumOptions | undefined;
+    /**
+     * Range of reserved numeric values. Reserved numeric values may not be used
+     * by enum values in the same enum declaration. Reserved ranges may not
+     * overlap.
+     */
+    reservedRange: EnumDescriptorProto_EnumReservedRange[];
+    /**
+     * Reserved enum value names, which may not be reused. A given name may only
+     * be reserved once.
+     */
+    reservedName: string[];
+}
+/**
+ * Range of reserved numeric values. Reserved values may not be used by
+ * entries in the same enum. Reserved ranges may not overlap.
+ *
+ * Note that this is distinct from DescriptorProto.ReservedRange in that it
+ * is inclusive such that it can appropriately represent the entire int32
+ * domain.
+ */
+export interface EnumDescriptorProto_EnumReservedRange {
+    /** Inclusive. */
+    start: number;
+    /** Inclusive. */
+    end: number;
+}
+/** Describes a value within an enum. */
+export interface EnumValueDescriptorProto {
+    name: string;
+    number: number;
+    options: EnumValueOptions | undefined;
+}
+/** Describes a service. */
+export interface ServiceDescriptorProto {
+    name: string;
+    method: MethodDescriptorProto[];
+    options: ServiceOptions | undefined;
+}
+/** Describes a method of a service. */
+export interface MethodDescriptorProto {
+    name: string;
+    /**
+     * Input and output type names.  These are resolved in the same way as
+     * FieldDescriptorProto.type_name, but must refer to a message type.
+     */
+    inputType: string;
+    outputType: string;
+    options: MethodOptions | undefined;
+    /** Identifies if client streams multiple client messages */
+    clientStreaming: boolean;
+    /** Identifies if server streams multiple server messages */
+    serverStreaming: boolean;
+}
+export interface FileOptions {
+    /**
+     * Sets the Java package where classes generated from this .proto will be
+     * placed.  By default, the proto package is used, but this is often
+     * inappropriate because proto packages do not normally start with backwards
+     * domain names.
+     */
+    javaPackage: string;
+    /**
+     * Controls the name of the wrapper Java class generated for the .proto file.
+     * That class will always contain the .proto file's getDescriptor() method as
+     * well as any top-level extensions defined in the .proto file.
+     * If java_multiple_files is disabled, then all the other classes from the
+     * .proto file will be nested inside the single wrapper outer class.
+     */
+    javaOuterClassname: string;
+    /**
+     * If enabled, then the Java code generator will generate a separate .java
+     * file for each top-level message, enum, and service defined in the .proto
+     * file.  Thus, these types will *not* be nested inside the wrapper class
+     * named by java_outer_classname.  However, the wrapper class will still be
+     * generated to contain the file's getDescriptor() method as well as any
+     * top-level extensions defined in the file.
+     */
+    javaMultipleFiles: boolean;
+    /**
+     * This option does nothing.
+     *
+     * @deprecated
+     */
+    javaGenerateEqualsAndHash: boolean;
+    /**
+     * If set true, then the Java2 code generator will generate code that
+     * throws an exception whenever an attempt is made to assign a non-UTF-8
+     * byte sequence to a string field.
+     * Message reflection will do the same.
+     * However, an extension field still accepts non-UTF-8 byte sequences.
+     * This option has no effect on when used with the lite runtime.
+     */
+    javaStringCheckUtf8: boolean;
+    optimizeFor: FileOptions_OptimizeMode;
+    /**
+     * Sets the Go package where structs generated from this .proto will be
+     * placed. If omitted, the Go package will be derived from the following:
+     *   - The basename of the package import path, if provided.
+     *   - Otherwise, the package statement in the .proto file, if present.
+     *   - Otherwise, the basename of the .proto file, without extension.
+     */
+    goPackage: string;
+    /**
+     * Should generic services be generated in each language?  "Generic" services
+     * are not specific to any particular RPC system.  They are generated by the
+     * main code generators in each language (without additional plugins).
+     * Generic services were the only kind of service generation supported by
+     * early versions of google.protobuf.
+     *
+     * Generic services are now considered deprecated in favor of using plugins
+     * that generate code specific to your particular RPC system.  Therefore,
+     * these default to false.  Old code which depends on generic services should
+     * explicitly set them to true.
+     */
+    ccGenericServices: boolean;
+    javaGenericServices: boolean;
+    pyGenericServices: boolean;
+    phpGenericServices: boolean;
+    /**
+     * Is this file deprecated?
+     * Depending on the target platform, this can emit Deprecated annotations
+     * for everything in the file, or it will be completely ignored; in the very
+     * least, this is a formalization for deprecating files.
+     */
+    deprecated: boolean;
+    /**
+     * Enables the use of arenas for the proto messages in this file. This applies
+     * only to generated classes for C++.
+     */
+    ccEnableArenas: boolean;
+    /**
+     * Sets the objective c class prefix which is prepended to all objective c
+     * generated classes from this .proto. There is no default.
+     */
+    objcClassPrefix: string;
+    /** Namespace for generated classes; defaults to the package. */
+    csharpNamespace: string;
+    /**
+     * By default Swift generators will take the proto package and CamelCase it
+     * replacing '.' with underscore and use that to prefix the types/symbols
+     * defined. When this options is provided, they will use this value instead
+     * to prefix the types/symbols defined.
+     */
+    swiftPrefix: string;
+    /**
+     * Sets the php class prefix which is prepended to all php generated classes
+     * from this .proto. Default is empty.
+     */
+    phpClassPrefix: string;
+    /**
+     * Use this option to change the namespace of php generated classes. Default
+     * is empty. When this option is empty, the package name will be used for
+     * determining the namespace.
+     */
+    phpNamespace: string;
+    /**
+     * Use this option to change the namespace of php generated metadata classes.
+     * Default is empty. When this option is empty, the proto file name will be
+     * used for determining the namespace.
+     */
+    phpMetadataNamespace: string;
+    /**
+     * Use this option to change the package of ruby generated classes. Default
+     * is empty. When this option is not set, the package name will be used for
+     * determining the ruby package.
+     */
+    rubyPackage: string;
+    /**
+     * The parser stores options it doesn't recognize here.
+     * See the documentation for the "Options" section above.
+     */
+    uninterpretedOption: UninterpretedOption[];
+}
+/** Generated classes can be optimized for speed or code size. */
+export declare enum FileOptions_OptimizeMode {
+    /** SPEED - Generate complete code for parsing, serialization, */
+    SPEED = 1,
+    /** CODE_SIZE - etc. */
+    CODE_SIZE = 2,
+    /** LITE_RUNTIME - Generate code using MessageLite and the lite runtime. */
+    LITE_RUNTIME = 3
+}
+export declare function fileOptions_OptimizeModeFromJSON(object: any): FileOptions_OptimizeMode;
+export declare function fileOptions_OptimizeModeToJSON(object: FileOptions_OptimizeMode): string;
+export interface MessageOptions {
+    /**
+     * Set true to use the old proto1 MessageSet wire format for extensions.
+     * This is provided for backwards-compatibility with the MessageSet wire
+     * format.  You should not use this for any other reason:  It's less
+     * efficient, has fewer features, and is more complicated.
+     *
+     * The message must be defined exactly as follows:
+     *   message Foo {
+     *     option message_set_wire_format = true;
+     *     extensions 4 to max;
+     *   }
+     * Note that the message cannot have any defined fields; MessageSets only
+     * have extensions.
+     *
+     * All extensions of your type must be singular messages; e.g. they cannot
+     * be int32s, enums, or repeated messages.
+     *
+     * Because this is an option, the above two restrictions are not enforced by
+     * the protocol compiler.
+     */
+    messageSetWireFormat: boolean;
+    /**
+     * Disables the generation of the standard "descriptor()" accessor, which can
+     * conflict with a field of the same name.  This is meant to make migration
+     * from proto1 easier; new code should avoid fields named "descriptor".
+     */
+    noStandardDescriptorAccessor: boolean;
+    /**
+     * Is this message deprecated?
+     * Depending on the target platform, this can emit Deprecated annotations
+     * for the message, or it will be completely ignored; in the very least,
+     * this is a formalization for deprecating messages.
+     */
+    deprecated: boolean;
+    /**
+     * Whether the message is an automatically generated map entry type for the
+     * maps field.
+     *
+     * For maps fields:
+     *     map<KeyType, ValueType> map_field = 1;
+     * The parsed descriptor looks like:
+     *     message MapFieldEntry {
+     *         option map_entry = true;
+     *         optional KeyType key = 1;
+     *         optional ValueType value = 2;
+     *     }
+     *     repeated MapFieldEntry map_field = 1;
+     *
+     * Implementations may choose not to generate the map_entry=true message, but
+     * use a native map in the target language to hold the keys and values.
+     * The reflection APIs in such implementations still need to work as
+     * if the field is a repeated message field.
+     *
+     * NOTE: Do not set the option in .proto files. Always use the maps syntax
+     * instead. The option should only be implicitly set by the proto compiler
+     * parser.
+     */
+    mapEntry: boolean;
+    /** The parser stores options it doesn't recognize here. See above. */
+    uninterpretedOption: UninterpretedOption[];
+}
+export interface FieldOptions {
+    /**
+     * The ctype option instructs the C++ code generator to use a different
+     * representation of the field than it normally would.  See the specific
+     * options below.  This option is not yet implemented in the open source
+     * release -- sorry, we'll try to include it in a future version!
+     */
+    ctype: FieldOptions_CType;
+    /**
+     * The packed option can be enabled for repeated primitive fields to enable
+     * a more efficient representation on the wire. Rather than repeatedly
+     * writing the tag and type for each element, the entire array is encoded as
+     * a single length-delimited blob. In proto3, only explicit setting it to
+     * false will avoid using packed encoding.
+     */
+    packed: boolean;
+    /**
+     * The jstype option determines the JavaScript type used for values of the
+     * field.  The option is permitted only for 64 bit integral and fixed types
+     * (int64, uint64, sint64, fixed64, sfixed64).  A field with jstype JS_STRING
+     * is represented as JavaScript string, which avoids loss of precision that
+     * can happen when a large value is converted to a floating point JavaScript.
+     * Specifying JS_NUMBER for the jstype causes the generated JavaScript code to
+     * use the JavaScript "number" type.  The behavior of the default option
+     * JS_NORMAL is implementation dependent.
+     *
+     * This option is an enum to permit additional types to be added, e.g.
+     * goog.math.Integer.
+     */
+    jstype: FieldOptions_JSType;
+    /**
+     * Should this field be parsed lazily?  Lazy applies only to message-type
+     * fields.  It means that when the outer message is initially parsed, the
+     * inner message's contents will not be parsed but instead stored in encoded
+     * form.  The inner message will actually be parsed when it is first accessed.
+     *
+     * This is only a hint.  Implementations are free to choose whether to use
+     * eager or lazy parsing regardless of the value of this option.  However,
+     * setting this option true suggests that the protocol author believes that
+     * using lazy parsing on this field is worth the additional bookkeeping
+     * overhead typically needed to implement it.
+     *
+     * This option does not affect the public interface of any generated code;
+     * all method signatures remain the same.  Furthermore, thread-safety of the
+     * interface is not affected by this option; const methods remain safe to
+     * call from multiple threads concurrently, while non-const methods continue
+     * to require exclusive access.
+     *
+     * Note that implementations may choose not to check required fields within
+     * a lazy sub-message.  That is, calling IsInitialized() on the outer message
+     * may return true even if the inner message has missing required fields.
+     * This is necessary because otherwise the inner message would have to be
+     * parsed in order to perform the check, defeating the purpose of lazy
+     * parsing.  An implementation which chooses not to check required fields
+     * must be consistent about it.  That is, for any particular sub-message, the
+     * implementation must either *always* check its required fields, or *never*
+     * check its required fields, regardless of whether or not the message has
+     * been parsed.
+     *
+     * As of 2021, lazy does no correctness checks on the byte stream during
+     * parsing.  This may lead to crashes if and when an invalid byte stream is
+     * finally parsed upon access.
+     *
+     * TODO(b/211906113):  Enable validation on lazy fields.
+     */
+    lazy: boolean;
+    /**
+     * unverified_lazy does no correctness checks on the byte stream. This should
+     * only be used where lazy with verification is prohibitive for performance
+     * reasons.
+     */
+    unverifiedLazy: boolean;
+    /**
+     * Is this field deprecated?
+     * Depending on the target platform, this can emit Deprecated annotations
+     * for accessors, or it will be completely ignored; in the very least, this
+     * is a formalization for deprecating fields.
+     */
+    deprecated: boolean;
+    /** For Google-internal migration only. Do not use. */
+    weak: boolean;
+    /** The parser stores options it doesn't recognize here. See above. */
+    uninterpretedOption: UninterpretedOption[];
+}
+export declare enum FieldOptions_CType {
+    /** STRING - Default mode. */
+    STRING = 0,
+    CORD = 1,
+    STRING_PIECE = 2
+}
+export declare function fieldOptions_CTypeFromJSON(object: any): FieldOptions_CType;
+export declare function fieldOptions_CTypeToJSON(object: FieldOptions_CType): string;
+export declare enum FieldOptions_JSType {
+    /** JS_NORMAL - Use the default type. */
+    JS_NORMAL = 0,
+    /** JS_STRING - Use JavaScript strings. */
+    JS_STRING = 1,
+    /** JS_NUMBER - Use JavaScript numbers. */
+    JS_NUMBER = 2
+}
+export declare function fieldOptions_JSTypeFromJSON(object: any): FieldOptions_JSType;
+export declare function fieldOptions_JSTypeToJSON(object: FieldOptions_JSType): string;
+export interface OneofOptions {
+    /** The parser stores options it doesn't recognize here. See above. */
+    uninterpretedOption: UninterpretedOption[];
+}
+export interface EnumOptions {
+    /**
+     * Set this option to true to allow mapping different tag names to the same
+     * value.
+     */
+    allowAlias: boolean;
+    /**
+     * Is this enum deprecated?
+     * Depending on the target platform, this can emit Deprecated annotations
+     * for the enum, or it will be completely ignored; in the very least, this
+     * is a formalization for deprecating enums.
+     */
+    deprecated: boolean;
+    /** The parser stores options it doesn't recognize here. See above. */
+    uninterpretedOption: UninterpretedOption[];
+}
+export interface EnumValueOptions {
+    /**
+     * Is this enum value deprecated?
+     * Depending on the target platform, this can emit Deprecated annotations
+     * for the enum value, or it will be completely ignored; in the very least,
+     * this is a formalization for deprecating enum values.
+     */
+    deprecated: boolean;
+    /** The parser stores options it doesn't recognize here. See above. */
+    uninterpretedOption: UninterpretedOption[];
+}
+export interface ServiceOptions {
+    /**
+     * Is this service deprecated?
+     * Depending on the target platform, this can emit Deprecated annotations
+     * for the service, or it will be completely ignored; in the very least,
+     * this is a formalization for deprecating services.
+     */
+    deprecated: boolean;
+    /** The parser stores options it doesn't recognize here. See above. */
+    uninterpretedOption: UninterpretedOption[];
+}
+export interface MethodOptions {
+    /**
+     * Is this method deprecated?
+     * Depending on the target platform, this can emit Deprecated annotations
+     * for the method, or it will be completely ignored; in the very least,
+     * this is a formalization for deprecating methods.
+     */
+    deprecated: boolean;
+    idempotencyLevel: MethodOptions_IdempotencyLevel;
+    /** The parser stores options it doesn't recognize here. See above. */
+    uninterpretedOption: UninterpretedOption[];
+}
+/**
+ * Is this method side-effect-free (or safe in HTTP parlance), or idempotent,
+ * or neither? HTTP based RPC implementation may choose GET verb for safe
+ * methods, and PUT verb for idempotent methods instead of the default POST.
+ */
+export declare enum MethodOptions_IdempotencyLevel {
+    IDEMPOTENCY_UNKNOWN = 0,
+    /** NO_SIDE_EFFECTS - implies idempotent */
+    NO_SIDE_EFFECTS = 1,
+    /** IDEMPOTENT - idempotent, but may have side effects */
+    IDEMPOTENT = 2
+}
+export declare function methodOptions_IdempotencyLevelFromJSON(object: any): MethodOptions_IdempotencyLevel;
+export declare function methodOptions_IdempotencyLevelToJSON(object: MethodOptions_IdempotencyLevel): string;
+/**
+ * A message representing a option the parser does not recognize. This only
+ * appears in options protos created by the compiler::Parser class.
+ * DescriptorPool resolves these when building Descriptor objects. Therefore,
+ * options protos in descriptor objects (e.g. returned by Descriptor::options(),
+ * or produced by Descriptor::CopyTo()) will never have UninterpretedOptions
+ * in them.
+ */
+export interface UninterpretedOption {
+    name: UninterpretedOption_NamePart[];
+    /**
+     * The value of the uninterpreted option, in whatever type the tokenizer
+     * identified it as during parsing. Exactly one of these should be set.
+     */
+    identifierValue: string;
+    positiveIntValue: string;
+    negativeIntValue: string;
+    doubleValue: number;
+    stringValue: Buffer;
+    aggregateValue: string;
+}
+/**
+ * The name of the uninterpreted option.  Each string represents a segment in
+ * a dot-separated name.  is_extension is true iff a segment represents an
+ * extension (denoted with parentheses in options specs in .proto files).
+ * E.g.,{ ["foo", false], ["bar.baz", true], ["moo", false] } represents
+ * "foo.(bar.baz).moo".
+ */
+export interface UninterpretedOption_NamePart {
+    namePart: string;
+    isExtension: boolean;
+}
+/**
+ * Encapsulates information about the original source file from which a
+ * FileDescriptorProto was generated.
+ */
+export interface SourceCodeInfo {
+    /**
+     * A Location identifies a piece of source code in a .proto file which
+     * corresponds to a particular definition.  This information is intended
+     * to be useful to IDEs, code indexers, documentation generators, and similar
+     * tools.
+     *
+     * For example, say we have a file like:
+     *   message Foo {
+     *     optional string foo = 1;
+     *   }
+     * Let's look at just the field definition:
+     *   optional string foo = 1;
+     *   ^       ^^     ^^  ^  ^^^
+     *   a       bc     de  f  ghi
+     * We have the following locations:
+     *   span   path               represents
+     *   [a,i)  [ 4, 0, 2, 0 ]     The whole field definition.
+     *   [a,b)  [ 4, 0, 2, 0, 4 ]  The label (optional).
+     *   [c,d)  [ 4, 0, 2, 0, 5 ]  The type (string).
+     *   [e,f)  [ 4, 0, 2, 0, 1 ]  The name (foo).
+     *   [g,h)  [ 4, 0, 2, 0, 3 ]  The number (1).
+     *
+     * Notes:
+     * - A location may refer to a repeated field itself (i.e. not to any
+     *   particular index within it).  This is used whenever a set of elements are
+     *   logically enclosed in a single code segment.  For example, an entire
+     *   extend block (possibly containing multiple extension definitions) will
+     *   have an outer location whose path refers to the "extensions" repeated
+     *   field without an index.
+     * - Multiple locations may have the same path.  This happens when a single
+     *   logical declaration is spread out across multiple places.  The most
+     *   obvious example is the "extend" block again -- there may be multiple
+     *   extend blocks in the same scope, each of which will have the same path.
+     * - A location's span is not always a subset of its parent's span.  For
+     *   example, the "extendee" of an extension declaration appears at the
+     *   beginning of the "extend" block and is shared by all extensions within
+     *   the block.
+     * - Just because a location's span is a subset of some other location's span
+     *   does not mean that it is a descendant.  For example, a "group" defines
+     *   both a type and a field in a single declaration.  Thus, the locations
+     *   corresponding to the type and field and their components will overlap.
+     * - Code which tries to interpret locations should probably be designed to
+     *   ignore those that it doesn't understand, as more types of locations could
+     *   be recorded in the future.
+     */
+    location: SourceCodeInfo_Location[];
+}
+export interface SourceCodeInfo_Location {
+    /**
+     * Identifies which part of the FileDescriptorProto was defined at this
+     * location.
+     *
+     * Each element is a field number or an index.  They form a path from
+     * the root FileDescriptorProto to the place where the definition occurs.
+     * For example, this path:
+     *   [ 4, 3, 2, 7, 1 ]
+     * refers to:
+     *   file.message_type(3)  // 4, 3
+     *       .field(7)         // 2, 7
+     *       .name()           // 1
+     * This is because FileDescriptorProto.message_type has field number 4:
+     *   repeated DescriptorProto message_type = 4;
+     * and DescriptorProto.field has field number 2:
+     *   repeated FieldDescriptorProto field = 2;
+     * and FieldDescriptorProto.name has field number 1:
+     *   optional string name = 1;
+     *
+     * Thus, the above path gives the location of a field name.  If we removed
+     * the last element:
+     *   [ 4, 3, 2, 7 ]
+     * this path refers to the whole field declaration (from the beginning
+     * of the label to the terminating semicolon).
+     */
+    path: number[];
+    /**
+     * Always has exactly three or four elements: start line, start column,
+     * end line (optional, otherwise assumed same as start line), end column.
+     * These are packed into a single field for efficiency.  Note that line
+     * and column numbers are zero-based -- typically you will want to add
+     * 1 to each before displaying to a user.
+     */
+    span: number[];
+    /**
+     * If this SourceCodeInfo represents a complete declaration, these are any
+     * comments appearing before and after the declaration which appear to be
+     * attached to the declaration.
+     *
+     * A series of line comments appearing on consecutive lines, with no other
+     * tokens appearing on those lines, will be treated as a single comment.
+     *
+     * leading_detached_comments will keep paragraphs of comments that appear
+     * before (but not connected to) the current element. Each paragraph,
+     * separated by empty lines, will be one comment element in the repeated
+     * field.
+     *
+     * Only the comment content is provided; comment markers (e.g. //) are
+     * stripped out.  For block comments, leading whitespace and an asterisk
+     * will be stripped from the beginning of each line other than the first.
+     * Newlines are included in the output.
+     *
+     * Examples:
+     *
+     *   optional int32 foo = 1;  // Comment attached to foo.
+     *   // Comment attached to bar.
+     *   optional int32 bar = 2;
+     *
+     *   optional string baz = 3;
+     *   // Comment attached to baz.
+     *   // Another line attached to baz.
+     *
+     *   // Comment attached to moo.
+     *   //
+     *   // Another line attached to moo.
+     *   optional double moo = 4;
+     *
+     *   // Detached comment for corge. This is not leading or trailing comments
+     *   // to moo or corge because there are blank lines separating it from
+     *   // both.
+     *
+     *   // Detached comment for corge paragraph 2.
+     *
+     *   optional string corge = 5;
+     *   /* Block comment attached
+     *    * to corge.  Leading asterisks
+     *    * will be removed. * /
+     *   /* Block comment attached to
+     *    * grault. * /
+     *   optional int32 grault = 6;
+     *
+     *   // ignored detached comments.
+     */
+    leadingComments: string;
+    trailingComments: string;
+    leadingDetachedComments: string[];
+}
+/**
+ * Describes the relationship between generated code and its original source
+ * file. A GeneratedCodeInfo message is associated with only one generated
+ * source file, but may contain references to different source .proto files.
+ */
+export interface GeneratedCodeInfo {
+    /**
+     * An Annotation connects some span of text in generated code to an element
+     * of its generating .proto file.
+     */
+    annotation: GeneratedCodeInfo_Annotation[];
+}
+export interface GeneratedCodeInfo_Annotation {
+    /**
+     * Identifies the element in the original source .proto file. This field
+     * is formatted the same as SourceCodeInfo.Location.path.
+     */
+    path: number[];
+    /** Identifies the filesystem path to the original source .proto. */
+    sourceFile: string;
+    /**
+     * Identifies the starting offset in bytes in the generated code
+     * that relates to the identified object.
+     */
+    begin: number;
+    /**
+     * Identifies the ending offset in bytes in the generated code that
+     * relates to the identified offset. The end offset should be one past
+     * the last relevant byte (so the length of the text = end - begin).
+     */
+    end: number;
+}
+export declare const FileDescriptorSet: {
+    fromJSON(object: any): FileDescriptorSet;
+    toJSON(message: FileDescriptorSet): unknown;
+};
+export declare const FileDescriptorProto: {
+    fromJSON(object: any): FileDescriptorProto;
+    toJSON(message: FileDescriptorProto): unknown;
+};
+export declare const DescriptorProto: {
+    fromJSON(object: any): DescriptorProto;
+    toJSON(message: DescriptorProto): unknown;
+};
+export declare const DescriptorProto_ExtensionRange: {
+    fromJSON(object: any): DescriptorProto_ExtensionRange;
+    toJSON(message: DescriptorProto_ExtensionRange): unknown;
+};
+export declare const DescriptorProto_ReservedRange: {
+    fromJSON(object: any): DescriptorProto_ReservedRange;
+    toJSON(message: DescriptorProto_ReservedRange): unknown;
+};
+export declare const ExtensionRangeOptions: {
+    fromJSON(object: any): ExtensionRangeOptions;
+    toJSON(message: ExtensionRangeOptions): unknown;
+};
+export declare const FieldDescriptorProto: {
+    fromJSON(object: any): FieldDescriptorProto;
+    toJSON(message: FieldDescriptorProto): unknown;
+};
+export declare const OneofDescriptorProto: {
+    fromJSON(object: any): OneofDescriptorProto;
+    toJSON(message: OneofDescriptorProto): unknown;
+};
+export declare const EnumDescriptorProto: {
+    fromJSON(object: any): EnumDescriptorProto;
+    toJSON(message: EnumDescriptorProto): unknown;
+};
+export declare const EnumDescriptorProto_EnumReservedRange: {
+    fromJSON(object: any): EnumDescriptorProto_EnumReservedRange;
+    toJSON(message: EnumDescriptorProto_EnumReservedRange): unknown;
+};
+export declare const EnumValueDescriptorProto: {
+    fromJSON(object: any): EnumValueDescriptorProto;
+    toJSON(message: EnumValueDescriptorProto): unknown;
+};
+export declare const ServiceDescriptorProto: {
+    fromJSON(object: any): ServiceDescriptorProto;
+    toJSON(message: ServiceDescriptorProto): unknown;
+};
+export declare const MethodDescriptorProto: {
+    fromJSON(object: any): MethodDescriptorProto;
+    toJSON(message: MethodDescriptorProto): unknown;
+};
+export declare const FileOptions: {
+    fromJSON(object: any): FileOptions;
+    toJSON(message: FileOptions): unknown;
+};
+export declare const MessageOptions: {
+    fromJSON(object: any): MessageOptions;
+    toJSON(message: MessageOptions): unknown;
+};
+export declare const FieldOptions: {
+    fromJSON(object: any): FieldOptions;
+    toJSON(message: FieldOptions): unknown;
+};
+export declare const OneofOptions: {
+    fromJSON(object: any): OneofOptions;
+    toJSON(message: OneofOptions): unknown;
+};
+export declare const EnumOptions: {
+    fromJSON(object: any): EnumOptions;
+    toJSON(message: EnumOptions): unknown;
+};
+export declare const EnumValueOptions: {
+    fromJSON(object: any): EnumValueOptions;
+    toJSON(message: EnumValueOptions): unknown;
+};
+export declare const ServiceOptions: {
+    fromJSON(object: any): ServiceOptions;
+    toJSON(message: ServiceOptions): unknown;
+};
+export declare const MethodOptions: {
+    fromJSON(object: any): MethodOptions;
+    toJSON(message: MethodOptions): unknown;
+};
+export declare const UninterpretedOption: {
+    fromJSON(object: any): UninterpretedOption;
+    toJSON(message: UninterpretedOption): unknown;
+};
+export declare const UninterpretedOption_NamePart: {
+    fromJSON(object: any): UninterpretedOption_NamePart;
+    toJSON(message: UninterpretedOption_NamePart): unknown;
+};
+export declare const SourceCodeInfo: {
+    fromJSON(object: any): SourceCodeInfo;
+    toJSON(message: SourceCodeInfo): unknown;
+};
+export declare const SourceCodeInfo_Location: {
+    fromJSON(object: any): SourceCodeInfo_Location;
+    toJSON(message: SourceCodeInfo_Location): unknown;
+};
+export declare const GeneratedCodeInfo: {
+    fromJSON(object: any): GeneratedCodeInfo;
+    toJSON(message: GeneratedCodeInfo): unknown;
+};
+export declare const GeneratedCodeInfo_Annotation: {
+    fromJSON(object: any): GeneratedCodeInfo_Annotation;
+    toJSON(message: GeneratedCodeInfo_Annotation): unknown;
+};
diff --git a/node_modules/sigstore/dist/types/sigstore/__generated__/google/protobuf/descriptor.js b/node_modules/sigstore/dist/types/sigstore/__generated__/google/protobuf/descriptor.js
new file mode 100644
index 0000000000000..b8cfc86ab99aa
--- /dev/null
+++ b/node_modules/sigstore/dist/types/sigstore/__generated__/google/protobuf/descriptor.js
@@ -0,0 +1,1308 @@
+"use strict";
+/* eslint-disable */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.GeneratedCodeInfo_Annotation = exports.GeneratedCodeInfo = exports.SourceCodeInfo_Location = exports.SourceCodeInfo = exports.UninterpretedOption_NamePart = exports.UninterpretedOption = exports.MethodOptions = exports.ServiceOptions = exports.EnumValueOptions = exports.EnumOptions = exports.OneofOptions = exports.FieldOptions = exports.MessageOptions = exports.FileOptions = exports.MethodDescriptorProto = exports.ServiceDescriptorProto = exports.EnumValueDescriptorProto = exports.EnumDescriptorProto_EnumReservedRange = exports.EnumDescriptorProto = exports.OneofDescriptorProto = exports.FieldDescriptorProto = exports.ExtensionRangeOptions = exports.DescriptorProto_ReservedRange = exports.DescriptorProto_ExtensionRange = exports.DescriptorProto = exports.FileDescriptorProto = exports.FileDescriptorSet = exports.methodOptions_IdempotencyLevelToJSON = exports.methodOptions_IdempotencyLevelFromJSON = exports.MethodOptions_IdempotencyLevel = exports.fieldOptions_JSTypeToJSON = exports.fieldOptions_JSTypeFromJSON = exports.FieldOptions_JSType = exports.fieldOptions_CTypeToJSON = exports.fieldOptions_CTypeFromJSON = exports.FieldOptions_CType = exports.fileOptions_OptimizeModeToJSON = exports.fileOptions_OptimizeModeFromJSON = exports.FileOptions_OptimizeMode = exports.fieldDescriptorProto_LabelToJSON = exports.fieldDescriptorProto_LabelFromJSON = exports.FieldDescriptorProto_Label = exports.fieldDescriptorProto_TypeToJSON = exports.fieldDescriptorProto_TypeFromJSON = exports.FieldDescriptorProto_Type = void 0;
+var FieldDescriptorProto_Type;
+(function (FieldDescriptorProto_Type) {
+    /**
+     * TYPE_DOUBLE - 0 is reserved for errors.
+     * Order is weird for historical reasons.
+     */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_DOUBLE"] = 1] = "TYPE_DOUBLE";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FLOAT"] = 2] = "TYPE_FLOAT";
+    /**
+     * TYPE_INT64 - Not ZigZag encoded.  Negative numbers take 10 bytes.  Use TYPE_SINT64 if
+     * negative values are likely.
+     */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_INT64"] = 3] = "TYPE_INT64";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_UINT64"] = 4] = "TYPE_UINT64";
+    /**
+     * TYPE_INT32 - Not ZigZag encoded.  Negative numbers take 10 bytes.  Use TYPE_SINT32 if
+     * negative values are likely.
+     */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_INT32"] = 5] = "TYPE_INT32";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FIXED64"] = 6] = "TYPE_FIXED64";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FIXED32"] = 7] = "TYPE_FIXED32";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_BOOL"] = 8] = "TYPE_BOOL";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_STRING"] = 9] = "TYPE_STRING";
+    /**
+     * TYPE_GROUP - Tag-delimited aggregate.
+     * Group type is deprecated and not supported in proto3. However, Proto3
+     * implementations should still be able to parse the group wire format and
+     * treat group fields as unknown fields.
+     */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_GROUP"] = 10] = "TYPE_GROUP";
+    /** TYPE_MESSAGE - Length-delimited aggregate. */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_MESSAGE"] = 11] = "TYPE_MESSAGE";
+    /** TYPE_BYTES - New in version 2. */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_BYTES"] = 12] = "TYPE_BYTES";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_UINT32"] = 13] = "TYPE_UINT32";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_ENUM"] = 14] = "TYPE_ENUM";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SFIXED32"] = 15] = "TYPE_SFIXED32";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SFIXED64"] = 16] = "TYPE_SFIXED64";
+    /** TYPE_SINT32 - Uses ZigZag encoding. */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SINT32"] = 17] = "TYPE_SINT32";
+    /** TYPE_SINT64 - Uses ZigZag encoding. */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SINT64"] = 18] = "TYPE_SINT64";
+})(FieldDescriptorProto_Type = exports.FieldDescriptorProto_Type || (exports.FieldDescriptorProto_Type = {}));
+function fieldDescriptorProto_TypeFromJSON(object) {
+    switch (object) {
+        case 1:
+        case "TYPE_DOUBLE":
+            return FieldDescriptorProto_Type.TYPE_DOUBLE;
+        case 2:
+        case "TYPE_FLOAT":
+            return FieldDescriptorProto_Type.TYPE_FLOAT;
+        case 3:
+        case "TYPE_INT64":
+            return FieldDescriptorProto_Type.TYPE_INT64;
+        case 4:
+        case "TYPE_UINT64":
+            return FieldDescriptorProto_Type.TYPE_UINT64;
+        case 5:
+        case "TYPE_INT32":
+            return FieldDescriptorProto_Type.TYPE_INT32;
+        case 6:
+        case "TYPE_FIXED64":
+            return FieldDescriptorProto_Type.TYPE_FIXED64;
+        case 7:
+        case "TYPE_FIXED32":
+            return FieldDescriptorProto_Type.TYPE_FIXED32;
+        case 8:
+        case "TYPE_BOOL":
+            return FieldDescriptorProto_Type.TYPE_BOOL;
+        case 9:
+        case "TYPE_STRING":
+            return FieldDescriptorProto_Type.TYPE_STRING;
+        case 10:
+        case "TYPE_GROUP":
+            return FieldDescriptorProto_Type.TYPE_GROUP;
+        case 11:
+        case "TYPE_MESSAGE":
+            return FieldDescriptorProto_Type.TYPE_MESSAGE;
+        case 12:
+        case "TYPE_BYTES":
+            return FieldDescriptorProto_Type.TYPE_BYTES;
+        case 13:
+        case "TYPE_UINT32":
+            return FieldDescriptorProto_Type.TYPE_UINT32;
+        case 14:
+        case "TYPE_ENUM":
+            return FieldDescriptorProto_Type.TYPE_ENUM;
+        case 15:
+        case "TYPE_SFIXED32":
+            return FieldDescriptorProto_Type.TYPE_SFIXED32;
+        case 16:
+        case "TYPE_SFIXED64":
+            return FieldDescriptorProto_Type.TYPE_SFIXED64;
+        case 17:
+        case "TYPE_SINT32":
+            return FieldDescriptorProto_Type.TYPE_SINT32;
+        case 18:
+        case "TYPE_SINT64":
+            return FieldDescriptorProto_Type.TYPE_SINT64;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type");
+    }
+}
+exports.fieldDescriptorProto_TypeFromJSON = fieldDescriptorProto_TypeFromJSON;
+function fieldDescriptorProto_TypeToJSON(object) {
+    switch (object) {
+        case FieldDescriptorProto_Type.TYPE_DOUBLE:
+            return "TYPE_DOUBLE";
+        case FieldDescriptorProto_Type.TYPE_FLOAT:
+            return "TYPE_FLOAT";
+        case FieldDescriptorProto_Type.TYPE_INT64:
+            return "TYPE_INT64";
+        case FieldDescriptorProto_Type.TYPE_UINT64:
+            return "TYPE_UINT64";
+        case FieldDescriptorProto_Type.TYPE_INT32:
+            return "TYPE_INT32";
+        case FieldDescriptorProto_Type.TYPE_FIXED64:
+            return "TYPE_FIXED64";
+        case FieldDescriptorProto_Type.TYPE_FIXED32:
+            return "TYPE_FIXED32";
+        case FieldDescriptorProto_Type.TYPE_BOOL:
+            return "TYPE_BOOL";
+        case FieldDescriptorProto_Type.TYPE_STRING:
+            return "TYPE_STRING";
+        case FieldDescriptorProto_Type.TYPE_GROUP:
+            return "TYPE_GROUP";
+        case FieldDescriptorProto_Type.TYPE_MESSAGE:
+            return "TYPE_MESSAGE";
+        case FieldDescriptorProto_Type.TYPE_BYTES:
+            return "TYPE_BYTES";
+        case FieldDescriptorProto_Type.TYPE_UINT32:
+            return "TYPE_UINT32";
+        case FieldDescriptorProto_Type.TYPE_ENUM:
+            return "TYPE_ENUM";
+        case FieldDescriptorProto_Type.TYPE_SFIXED32:
+            return "TYPE_SFIXED32";
+        case FieldDescriptorProto_Type.TYPE_SFIXED64:
+            return "TYPE_SFIXED64";
+        case FieldDescriptorProto_Type.TYPE_SINT32:
+            return "TYPE_SINT32";
+        case FieldDescriptorProto_Type.TYPE_SINT64:
+            return "TYPE_SINT64";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type");
+    }
+}
+exports.fieldDescriptorProto_TypeToJSON = fieldDescriptorProto_TypeToJSON;
+var FieldDescriptorProto_Label;
+(function (FieldDescriptorProto_Label) {
+    /** LABEL_OPTIONAL - 0 is reserved for errors */
+    FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_OPTIONAL"] = 1] = "LABEL_OPTIONAL";
+    FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_REQUIRED"] = 2] = "LABEL_REQUIRED";
+    FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_REPEATED"] = 3] = "LABEL_REPEATED";
+})(FieldDescriptorProto_Label = exports.FieldDescriptorProto_Label || (exports.FieldDescriptorProto_Label = {}));
+function fieldDescriptorProto_LabelFromJSON(object) {
+    switch (object) {
+        case 1:
+        case "LABEL_OPTIONAL":
+            return FieldDescriptorProto_Label.LABEL_OPTIONAL;
+        case 2:
+        case "LABEL_REQUIRED":
+            return FieldDescriptorProto_Label.LABEL_REQUIRED;
+        case 3:
+        case "LABEL_REPEATED":
+            return FieldDescriptorProto_Label.LABEL_REPEATED;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label");
+    }
+}
+exports.fieldDescriptorProto_LabelFromJSON = fieldDescriptorProto_LabelFromJSON;
+function fieldDescriptorProto_LabelToJSON(object) {
+    switch (object) {
+        case FieldDescriptorProto_Label.LABEL_OPTIONAL:
+            return "LABEL_OPTIONAL";
+        case FieldDescriptorProto_Label.LABEL_REQUIRED:
+            return "LABEL_REQUIRED";
+        case FieldDescriptorProto_Label.LABEL_REPEATED:
+            return "LABEL_REPEATED";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label");
+    }
+}
+exports.fieldDescriptorProto_LabelToJSON = fieldDescriptorProto_LabelToJSON;
+/** Generated classes can be optimized for speed or code size. */
+var FileOptions_OptimizeMode;
+(function (FileOptions_OptimizeMode) {
+    /** SPEED - Generate complete code for parsing, serialization, */
+    FileOptions_OptimizeMode[FileOptions_OptimizeMode["SPEED"] = 1] = "SPEED";
+    /** CODE_SIZE - etc. */
+    FileOptions_OptimizeMode[FileOptions_OptimizeMode["CODE_SIZE"] = 2] = "CODE_SIZE";
+    /** LITE_RUNTIME - Generate code using MessageLite and the lite runtime. */
+    FileOptions_OptimizeMode[FileOptions_OptimizeMode["LITE_RUNTIME"] = 3] = "LITE_RUNTIME";
+})(FileOptions_OptimizeMode = exports.FileOptions_OptimizeMode || (exports.FileOptions_OptimizeMode = {}));
+function fileOptions_OptimizeModeFromJSON(object) {
+    switch (object) {
+        case 1:
+        case "SPEED":
+            return FileOptions_OptimizeMode.SPEED;
+        case 2:
+        case "CODE_SIZE":
+            return FileOptions_OptimizeMode.CODE_SIZE;
+        case 3:
+        case "LITE_RUNTIME":
+            return FileOptions_OptimizeMode.LITE_RUNTIME;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode");
+    }
+}
+exports.fileOptions_OptimizeModeFromJSON = fileOptions_OptimizeModeFromJSON;
+function fileOptions_OptimizeModeToJSON(object) {
+    switch (object) {
+        case FileOptions_OptimizeMode.SPEED:
+            return "SPEED";
+        case FileOptions_OptimizeMode.CODE_SIZE:
+            return "CODE_SIZE";
+        case FileOptions_OptimizeMode.LITE_RUNTIME:
+            return "LITE_RUNTIME";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode");
+    }
+}
+exports.fileOptions_OptimizeModeToJSON = fileOptions_OptimizeModeToJSON;
+var FieldOptions_CType;
+(function (FieldOptions_CType) {
+    /** STRING - Default mode. */
+    FieldOptions_CType[FieldOptions_CType["STRING"] = 0] = "STRING";
+    FieldOptions_CType[FieldOptions_CType["CORD"] = 1] = "CORD";
+    FieldOptions_CType[FieldOptions_CType["STRING_PIECE"] = 2] = "STRING_PIECE";
+})(FieldOptions_CType = exports.FieldOptions_CType || (exports.FieldOptions_CType = {}));
+function fieldOptions_CTypeFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "STRING":
+            return FieldOptions_CType.STRING;
+        case 1:
+        case "CORD":
+            return FieldOptions_CType.CORD;
+        case 2:
+        case "STRING_PIECE":
+            return FieldOptions_CType.STRING_PIECE;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType");
+    }
+}
+exports.fieldOptions_CTypeFromJSON = fieldOptions_CTypeFromJSON;
+function fieldOptions_CTypeToJSON(object) {
+    switch (object) {
+        case FieldOptions_CType.STRING:
+            return "STRING";
+        case FieldOptions_CType.CORD:
+            return "CORD";
+        case FieldOptions_CType.STRING_PIECE:
+            return "STRING_PIECE";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType");
+    }
+}
+exports.fieldOptions_CTypeToJSON = fieldOptions_CTypeToJSON;
+var FieldOptions_JSType;
+(function (FieldOptions_JSType) {
+    /** JS_NORMAL - Use the default type. */
+    FieldOptions_JSType[FieldOptions_JSType["JS_NORMAL"] = 0] = "JS_NORMAL";
+    /** JS_STRING - Use JavaScript strings. */
+    FieldOptions_JSType[FieldOptions_JSType["JS_STRING"] = 1] = "JS_STRING";
+    /** JS_NUMBER - Use JavaScript numbers. */
+    FieldOptions_JSType[FieldOptions_JSType["JS_NUMBER"] = 2] = "JS_NUMBER";
+})(FieldOptions_JSType = exports.FieldOptions_JSType || (exports.FieldOptions_JSType = {}));
+function fieldOptions_JSTypeFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "JS_NORMAL":
+            return FieldOptions_JSType.JS_NORMAL;
+        case 1:
+        case "JS_STRING":
+            return FieldOptions_JSType.JS_STRING;
+        case 2:
+        case "JS_NUMBER":
+            return FieldOptions_JSType.JS_NUMBER;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType");
+    }
+}
+exports.fieldOptions_JSTypeFromJSON = fieldOptions_JSTypeFromJSON;
+function fieldOptions_JSTypeToJSON(object) {
+    switch (object) {
+        case FieldOptions_JSType.JS_NORMAL:
+            return "JS_NORMAL";
+        case FieldOptions_JSType.JS_STRING:
+            return "JS_STRING";
+        case FieldOptions_JSType.JS_NUMBER:
+            return "JS_NUMBER";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType");
+    }
+}
+exports.fieldOptions_JSTypeToJSON = fieldOptions_JSTypeToJSON;
+/**
+ * Is this method side-effect-free (or safe in HTTP parlance), or idempotent,
+ * or neither? HTTP based RPC implementation may choose GET verb for safe
+ * methods, and PUT verb for idempotent methods instead of the default POST.
+ */
+var MethodOptions_IdempotencyLevel;
+(function (MethodOptions_IdempotencyLevel) {
+    MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["IDEMPOTENCY_UNKNOWN"] = 0] = "IDEMPOTENCY_UNKNOWN";
+    /** NO_SIDE_EFFECTS - implies idempotent */
+    MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["NO_SIDE_EFFECTS"] = 1] = "NO_SIDE_EFFECTS";
+    /** IDEMPOTENT - idempotent, but may have side effects */
+    MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["IDEMPOTENT"] = 2] = "IDEMPOTENT";
+})(MethodOptions_IdempotencyLevel = exports.MethodOptions_IdempotencyLevel || (exports.MethodOptions_IdempotencyLevel = {}));
+function methodOptions_IdempotencyLevelFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "IDEMPOTENCY_UNKNOWN":
+            return MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN;
+        case 1:
+        case "NO_SIDE_EFFECTS":
+            return MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS;
+        case 2:
+        case "IDEMPOTENT":
+            return MethodOptions_IdempotencyLevel.IDEMPOTENT;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel");
+    }
+}
+exports.methodOptions_IdempotencyLevelFromJSON = methodOptions_IdempotencyLevelFromJSON;
+function methodOptions_IdempotencyLevelToJSON(object) {
+    switch (object) {
+        case MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN:
+            return "IDEMPOTENCY_UNKNOWN";
+        case MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS:
+            return "NO_SIDE_EFFECTS";
+        case MethodOptions_IdempotencyLevel.IDEMPOTENT:
+            return "IDEMPOTENT";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel");
+    }
+}
+exports.methodOptions_IdempotencyLevelToJSON = methodOptions_IdempotencyLevelToJSON;
+function createBaseFileDescriptorSet() {
+    return { file: [] };
+}
+exports.FileDescriptorSet = {
+    fromJSON(object) {
+        return { file: Array.isArray(object?.file) ? object.file.map((e) => exports.FileDescriptorProto.fromJSON(e)) : [] };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.file) {
+            obj.file = message.file.map((e) => e ? exports.FileDescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.file = [];
+        }
+        return obj;
+    },
+};
+function createBaseFileDescriptorProto() {
+    return {
+        name: "",
+        package: "",
+        dependency: [],
+        publicDependency: [],
+        weakDependency: [],
+        messageType: [],
+        enumType: [],
+        service: [],
+        extension: [],
+        options: undefined,
+        sourceCodeInfo: undefined,
+        syntax: "",
+    };
+}
+exports.FileDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? String(object.name) : "",
+            package: isSet(object.package) ? String(object.package) : "",
+            dependency: Array.isArray(object?.dependency) ? object.dependency.map((e) => String(e)) : [],
+            publicDependency: Array.isArray(object?.publicDependency)
+                ? object.publicDependency.map((e) => Number(e))
+                : [],
+            weakDependency: Array.isArray(object?.weakDependency) ? object.weakDependency.map((e) => Number(e)) : [],
+            messageType: Array.isArray(object?.messageType)
+                ? object.messageType.map((e) => exports.DescriptorProto.fromJSON(e))
+                : [],
+            enumType: Array.isArray(object?.enumType) ? object.enumType.map((e) => exports.EnumDescriptorProto.fromJSON(e)) : [],
+            service: Array.isArray(object?.service) ? object.service.map((e) => exports.ServiceDescriptorProto.fromJSON(e)) : [],
+            extension: Array.isArray(object?.extension)
+                ? object.extension.map((e) => exports.FieldDescriptorProto.fromJSON(e))
+                : [],
+            options: isSet(object.options) ? exports.FileOptions.fromJSON(object.options) : undefined,
+            sourceCodeInfo: isSet(object.sourceCodeInfo) ? exports.SourceCodeInfo.fromJSON(object.sourceCodeInfo) : undefined,
+            syntax: isSet(object.syntax) ? String(object.syntax) : "",
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.name !== undefined && (obj.name = message.name);
+        message.package !== undefined && (obj.package = message.package);
+        if (message.dependency) {
+            obj.dependency = message.dependency.map((e) => e);
+        }
+        else {
+            obj.dependency = [];
+        }
+        if (message.publicDependency) {
+            obj.publicDependency = message.publicDependency.map((e) => Math.round(e));
+        }
+        else {
+            obj.publicDependency = [];
+        }
+        if (message.weakDependency) {
+            obj.weakDependency = message.weakDependency.map((e) => Math.round(e));
+        }
+        else {
+            obj.weakDependency = [];
+        }
+        if (message.messageType) {
+            obj.messageType = message.messageType.map((e) => e ? exports.DescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.messageType = [];
+        }
+        if (message.enumType) {
+            obj.enumType = message.enumType.map((e) => e ? exports.EnumDescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.enumType = [];
+        }
+        if (message.service) {
+            obj.service = message.service.map((e) => e ? exports.ServiceDescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.service = [];
+        }
+        if (message.extension) {
+            obj.extension = message.extension.map((e) => e ? exports.FieldDescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.extension = [];
+        }
+        message.options !== undefined && (obj.options = message.options ? exports.FileOptions.toJSON(message.options) : undefined);
+        message.sourceCodeInfo !== undefined &&
+            (obj.sourceCodeInfo = message.sourceCodeInfo ? exports.SourceCodeInfo.toJSON(message.sourceCodeInfo) : undefined);
+        message.syntax !== undefined && (obj.syntax = message.syntax);
+        return obj;
+    },
+};
+function createBaseDescriptorProto() {
+    return {
+        name: "",
+        field: [],
+        extension: [],
+        nestedType: [],
+        enumType: [],
+        extensionRange: [],
+        oneofDecl: [],
+        options: undefined,
+        reservedRange: [],
+        reservedName: [],
+    };
+}
+exports.DescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? String(object.name) : "",
+            field: Array.isArray(object?.field) ? object.field.map((e) => exports.FieldDescriptorProto.fromJSON(e)) : [],
+            extension: Array.isArray(object?.extension)
+                ? object.extension.map((e) => exports.FieldDescriptorProto.fromJSON(e))
+                : [],
+            nestedType: Array.isArray(object?.nestedType)
+                ? object.nestedType.map((e) => exports.DescriptorProto.fromJSON(e))
+                : [],
+            enumType: Array.isArray(object?.enumType) ? object.enumType.map((e) => exports.EnumDescriptorProto.fromJSON(e)) : [],
+            extensionRange: Array.isArray(object?.extensionRange)
+                ? object.extensionRange.map((e) => exports.DescriptorProto_ExtensionRange.fromJSON(e))
+                : [],
+            oneofDecl: Array.isArray(object?.oneofDecl)
+                ? object.oneofDecl.map((e) => exports.OneofDescriptorProto.fromJSON(e))
+                : [],
+            options: isSet(object.options) ? exports.MessageOptions.fromJSON(object.options) : undefined,
+            reservedRange: Array.isArray(object?.reservedRange)
+                ? object.reservedRange.map((e) => exports.DescriptorProto_ReservedRange.fromJSON(e))
+                : [],
+            reservedName: Array.isArray(object?.reservedName) ? object.reservedName.map((e) => String(e)) : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.name !== undefined && (obj.name = message.name);
+        if (message.field) {
+            obj.field = message.field.map((e) => e ? exports.FieldDescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.field = [];
+        }
+        if (message.extension) {
+            obj.extension = message.extension.map((e) => e ? exports.FieldDescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.extension = [];
+        }
+        if (message.nestedType) {
+            obj.nestedType = message.nestedType.map((e) => e ? exports.DescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.nestedType = [];
+        }
+        if (message.enumType) {
+            obj.enumType = message.enumType.map((e) => e ? exports.EnumDescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.enumType = [];
+        }
+        if (message.extensionRange) {
+            obj.extensionRange = message.extensionRange.map((e) => e ? exports.DescriptorProto_ExtensionRange.toJSON(e) : undefined);
+        }
+        else {
+            obj.extensionRange = [];
+        }
+        if (message.oneofDecl) {
+            obj.oneofDecl = message.oneofDecl.map((e) => e ? exports.OneofDescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.oneofDecl = [];
+        }
+        message.options !== undefined &&
+            (obj.options = message.options ? exports.MessageOptions.toJSON(message.options) : undefined);
+        if (message.reservedRange) {
+            obj.reservedRange = message.reservedRange.map((e) => e ? exports.DescriptorProto_ReservedRange.toJSON(e) : undefined);
+        }
+        else {
+            obj.reservedRange = [];
+        }
+        if (message.reservedName) {
+            obj.reservedName = message.reservedName.map((e) => e);
+        }
+        else {
+            obj.reservedName = [];
+        }
+        return obj;
+    },
+};
+function createBaseDescriptorProto_ExtensionRange() {
+    return { start: 0, end: 0, options: undefined };
+}
+exports.DescriptorProto_ExtensionRange = {
+    fromJSON(object) {
+        return {
+            start: isSet(object.start) ? Number(object.start) : 0,
+            end: isSet(object.end) ? Number(object.end) : 0,
+            options: isSet(object.options) ? exports.ExtensionRangeOptions.fromJSON(object.options) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.start !== undefined && (obj.start = Math.round(message.start));
+        message.end !== undefined && (obj.end = Math.round(message.end));
+        message.options !== undefined &&
+            (obj.options = message.options ? exports.ExtensionRangeOptions.toJSON(message.options) : undefined);
+        return obj;
+    },
+};
+function createBaseDescriptorProto_ReservedRange() {
+    return { start: 0, end: 0 };
+}
+exports.DescriptorProto_ReservedRange = {
+    fromJSON(object) {
+        return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? Number(object.end) : 0 };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.start !== undefined && (obj.start = Math.round(message.start));
+        message.end !== undefined && (obj.end = Math.round(message.end));
+        return obj;
+    },
+};
+function createBaseExtensionRangeOptions() {
+    return { uninterpretedOption: [] };
+}
+exports.ExtensionRangeOptions = {
+    fromJSON(object) {
+        return {
+            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.uninterpretedOption) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+        }
+        else {
+            obj.uninterpretedOption = [];
+        }
+        return obj;
+    },
+};
+function createBaseFieldDescriptorProto() {
+    return {
+        name: "",
+        number: 0,
+        label: 1,
+        type: 1,
+        typeName: "",
+        extendee: "",
+        defaultValue: "",
+        oneofIndex: 0,
+        jsonName: "",
+        options: undefined,
+        proto3Optional: false,
+    };
+}
+exports.FieldDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? String(object.name) : "",
+            number: isSet(object.number) ? Number(object.number) : 0,
+            label: isSet(object.label) ? fieldDescriptorProto_LabelFromJSON(object.label) : 1,
+            type: isSet(object.type) ? fieldDescriptorProto_TypeFromJSON(object.type) : 1,
+            typeName: isSet(object.typeName) ? String(object.typeName) : "",
+            extendee: isSet(object.extendee) ? String(object.extendee) : "",
+            defaultValue: isSet(object.defaultValue) ? String(object.defaultValue) : "",
+            oneofIndex: isSet(object.oneofIndex) ? Number(object.oneofIndex) : 0,
+            jsonName: isSet(object.jsonName) ? String(object.jsonName) : "",
+            options: isSet(object.options) ? exports.FieldOptions.fromJSON(object.options) : undefined,
+            proto3Optional: isSet(object.proto3Optional) ? Boolean(object.proto3Optional) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.name !== undefined && (obj.name = message.name);
+        message.number !== undefined && (obj.number = Math.round(message.number));
+        message.label !== undefined && (obj.label = fieldDescriptorProto_LabelToJSON(message.label));
+        message.type !== undefined && (obj.type = fieldDescriptorProto_TypeToJSON(message.type));
+        message.typeName !== undefined && (obj.typeName = message.typeName);
+        message.extendee !== undefined && (obj.extendee = message.extendee);
+        message.defaultValue !== undefined && (obj.defaultValue = message.defaultValue);
+        message.oneofIndex !== undefined && (obj.oneofIndex = Math.round(message.oneofIndex));
+        message.jsonName !== undefined && (obj.jsonName = message.jsonName);
+        message.options !== undefined && (obj.options = message.options ? exports.FieldOptions.toJSON(message.options) : undefined);
+        message.proto3Optional !== undefined && (obj.proto3Optional = message.proto3Optional);
+        return obj;
+    },
+};
+function createBaseOneofDescriptorProto() {
+    return { name: "", options: undefined };
+}
+exports.OneofDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? String(object.name) : "",
+            options: isSet(object.options) ? exports.OneofOptions.fromJSON(object.options) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.name !== undefined && (obj.name = message.name);
+        message.options !== undefined && (obj.options = message.options ? exports.OneofOptions.toJSON(message.options) : undefined);
+        return obj;
+    },
+};
+function createBaseEnumDescriptorProto() {
+    return { name: "", value: [], options: undefined, reservedRange: [], reservedName: [] };
+}
+exports.EnumDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? String(object.name) : "",
+            value: Array.isArray(object?.value) ? object.value.map((e) => exports.EnumValueDescriptorProto.fromJSON(e)) : [],
+            options: isSet(object.options) ? exports.EnumOptions.fromJSON(object.options) : undefined,
+            reservedRange: Array.isArray(object?.reservedRange)
+                ? object.reservedRange.map((e) => exports.EnumDescriptorProto_EnumReservedRange.fromJSON(e))
+                : [],
+            reservedName: Array.isArray(object?.reservedName)
+                ? object.reservedName.map((e) => String(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.name !== undefined && (obj.name = message.name);
+        if (message.value) {
+            obj.value = message.value.map((e) => e ? exports.EnumValueDescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.value = [];
+        }
+        message.options !== undefined && (obj.options = message.options ? exports.EnumOptions.toJSON(message.options) : undefined);
+        if (message.reservedRange) {
+            obj.reservedRange = message.reservedRange.map((e) => e ? exports.EnumDescriptorProto_EnumReservedRange.toJSON(e) : undefined);
+        }
+        else {
+            obj.reservedRange = [];
+        }
+        if (message.reservedName) {
+            obj.reservedName = message.reservedName.map((e) => e);
+        }
+        else {
+            obj.reservedName = [];
+        }
+        return obj;
+    },
+};
+function createBaseEnumDescriptorProto_EnumReservedRange() {
+    return { start: 0, end: 0 };
+}
+exports.EnumDescriptorProto_EnumReservedRange = {
+    fromJSON(object) {
+        return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? Number(object.end) : 0 };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.start !== undefined && (obj.start = Math.round(message.start));
+        message.end !== undefined && (obj.end = Math.round(message.end));
+        return obj;
+    },
+};
+function createBaseEnumValueDescriptorProto() {
+    return { name: "", number: 0, options: undefined };
+}
+exports.EnumValueDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? String(object.name) : "",
+            number: isSet(object.number) ? Number(object.number) : 0,
+            options: isSet(object.options) ? exports.EnumValueOptions.fromJSON(object.options) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.name !== undefined && (obj.name = message.name);
+        message.number !== undefined && (obj.number = Math.round(message.number));
+        message.options !== undefined &&
+            (obj.options = message.options ? exports.EnumValueOptions.toJSON(message.options) : undefined);
+        return obj;
+    },
+};
+function createBaseServiceDescriptorProto() {
+    return { name: "", method: [], options: undefined };
+}
+exports.ServiceDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? String(object.name) : "",
+            method: Array.isArray(object?.method) ? object.method.map((e) => exports.MethodDescriptorProto.fromJSON(e)) : [],
+            options: isSet(object.options) ? exports.ServiceOptions.fromJSON(object.options) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.name !== undefined && (obj.name = message.name);
+        if (message.method) {
+            obj.method = message.method.map((e) => e ? exports.MethodDescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.method = [];
+        }
+        message.options !== undefined &&
+            (obj.options = message.options ? exports.ServiceOptions.toJSON(message.options) : undefined);
+        return obj;
+    },
+};
+function createBaseMethodDescriptorProto() {
+    return {
+        name: "",
+        inputType: "",
+        outputType: "",
+        options: undefined,
+        clientStreaming: false,
+        serverStreaming: false,
+    };
+}
+exports.MethodDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? String(object.name) : "",
+            inputType: isSet(object.inputType) ? String(object.inputType) : "",
+            outputType: isSet(object.outputType) ? String(object.outputType) : "",
+            options: isSet(object.options) ? exports.MethodOptions.fromJSON(object.options) : undefined,
+            clientStreaming: isSet(object.clientStreaming) ? Boolean(object.clientStreaming) : false,
+            serverStreaming: isSet(object.serverStreaming) ? Boolean(object.serverStreaming) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.name !== undefined && (obj.name = message.name);
+        message.inputType !== undefined && (obj.inputType = message.inputType);
+        message.outputType !== undefined && (obj.outputType = message.outputType);
+        message.options !== undefined &&
+            (obj.options = message.options ? exports.MethodOptions.toJSON(message.options) : undefined);
+        message.clientStreaming !== undefined && (obj.clientStreaming = message.clientStreaming);
+        message.serverStreaming !== undefined && (obj.serverStreaming = message.serverStreaming);
+        return obj;
+    },
+};
+function createBaseFileOptions() {
+    return {
+        javaPackage: "",
+        javaOuterClassname: "",
+        javaMultipleFiles: false,
+        javaGenerateEqualsAndHash: false,
+        javaStringCheckUtf8: false,
+        optimizeFor: 1,
+        goPackage: "",
+        ccGenericServices: false,
+        javaGenericServices: false,
+        pyGenericServices: false,
+        phpGenericServices: false,
+        deprecated: false,
+        ccEnableArenas: false,
+        objcClassPrefix: "",
+        csharpNamespace: "",
+        swiftPrefix: "",
+        phpClassPrefix: "",
+        phpNamespace: "",
+        phpMetadataNamespace: "",
+        rubyPackage: "",
+        uninterpretedOption: [],
+    };
+}
+exports.FileOptions = {
+    fromJSON(object) {
+        return {
+            javaPackage: isSet(object.javaPackage) ? String(object.javaPackage) : "",
+            javaOuterClassname: isSet(object.javaOuterClassname) ? String(object.javaOuterClassname) : "",
+            javaMultipleFiles: isSet(object.javaMultipleFiles) ? Boolean(object.javaMultipleFiles) : false,
+            javaGenerateEqualsAndHash: isSet(object.javaGenerateEqualsAndHash)
+                ? Boolean(object.javaGenerateEqualsAndHash)
+                : false,
+            javaStringCheckUtf8: isSet(object.javaStringCheckUtf8) ? Boolean(object.javaStringCheckUtf8) : false,
+            optimizeFor: isSet(object.optimizeFor) ? fileOptions_OptimizeModeFromJSON(object.optimizeFor) : 1,
+            goPackage: isSet(object.goPackage) ? String(object.goPackage) : "",
+            ccGenericServices: isSet(object.ccGenericServices) ? Boolean(object.ccGenericServices) : false,
+            javaGenericServices: isSet(object.javaGenericServices) ? Boolean(object.javaGenericServices) : false,
+            pyGenericServices: isSet(object.pyGenericServices) ? Boolean(object.pyGenericServices) : false,
+            phpGenericServices: isSet(object.phpGenericServices) ? Boolean(object.phpGenericServices) : false,
+            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
+            ccEnableArenas: isSet(object.ccEnableArenas) ? Boolean(object.ccEnableArenas) : false,
+            objcClassPrefix: isSet(object.objcClassPrefix) ? String(object.objcClassPrefix) : "",
+            csharpNamespace: isSet(object.csharpNamespace) ? String(object.csharpNamespace) : "",
+            swiftPrefix: isSet(object.swiftPrefix) ? String(object.swiftPrefix) : "",
+            phpClassPrefix: isSet(object.phpClassPrefix) ? String(object.phpClassPrefix) : "",
+            phpNamespace: isSet(object.phpNamespace) ? String(object.phpNamespace) : "",
+            phpMetadataNamespace: isSet(object.phpMetadataNamespace) ? String(object.phpMetadataNamespace) : "",
+            rubyPackage: isSet(object.rubyPackage) ? String(object.rubyPackage) : "",
+            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.javaPackage !== undefined && (obj.javaPackage = message.javaPackage);
+        message.javaOuterClassname !== undefined && (obj.javaOuterClassname = message.javaOuterClassname);
+        message.javaMultipleFiles !== undefined && (obj.javaMultipleFiles = message.javaMultipleFiles);
+        message.javaGenerateEqualsAndHash !== undefined &&
+            (obj.javaGenerateEqualsAndHash = message.javaGenerateEqualsAndHash);
+        message.javaStringCheckUtf8 !== undefined && (obj.javaStringCheckUtf8 = message.javaStringCheckUtf8);
+        message.optimizeFor !== undefined && (obj.optimizeFor = fileOptions_OptimizeModeToJSON(message.optimizeFor));
+        message.goPackage !== undefined && (obj.goPackage = message.goPackage);
+        message.ccGenericServices !== undefined && (obj.ccGenericServices = message.ccGenericServices);
+        message.javaGenericServices !== undefined && (obj.javaGenericServices = message.javaGenericServices);
+        message.pyGenericServices !== undefined && (obj.pyGenericServices = message.pyGenericServices);
+        message.phpGenericServices !== undefined && (obj.phpGenericServices = message.phpGenericServices);
+        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
+        message.ccEnableArenas !== undefined && (obj.ccEnableArenas = message.ccEnableArenas);
+        message.objcClassPrefix !== undefined && (obj.objcClassPrefix = message.objcClassPrefix);
+        message.csharpNamespace !== undefined && (obj.csharpNamespace = message.csharpNamespace);
+        message.swiftPrefix !== undefined && (obj.swiftPrefix = message.swiftPrefix);
+        message.phpClassPrefix !== undefined && (obj.phpClassPrefix = message.phpClassPrefix);
+        message.phpNamespace !== undefined && (obj.phpNamespace = message.phpNamespace);
+        message.phpMetadataNamespace !== undefined && (obj.phpMetadataNamespace = message.phpMetadataNamespace);
+        message.rubyPackage !== undefined && (obj.rubyPackage = message.rubyPackage);
+        if (message.uninterpretedOption) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+        }
+        else {
+            obj.uninterpretedOption = [];
+        }
+        return obj;
+    },
+};
+function createBaseMessageOptions() {
+    return {
+        messageSetWireFormat: false,
+        noStandardDescriptorAccessor: false,
+        deprecated: false,
+        mapEntry: false,
+        uninterpretedOption: [],
+    };
+}
+exports.MessageOptions = {
+    fromJSON(object) {
+        return {
+            messageSetWireFormat: isSet(object.messageSetWireFormat) ? Boolean(object.messageSetWireFormat) : false,
+            noStandardDescriptorAccessor: isSet(object.noStandardDescriptorAccessor)
+                ? Boolean(object.noStandardDescriptorAccessor)
+                : false,
+            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
+            mapEntry: isSet(object.mapEntry) ? Boolean(object.mapEntry) : false,
+            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.messageSetWireFormat !== undefined && (obj.messageSetWireFormat = message.messageSetWireFormat);
+        message.noStandardDescriptorAccessor !== undefined &&
+            (obj.noStandardDescriptorAccessor = message.noStandardDescriptorAccessor);
+        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
+        message.mapEntry !== undefined && (obj.mapEntry = message.mapEntry);
+        if (message.uninterpretedOption) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+        }
+        else {
+            obj.uninterpretedOption = [];
+        }
+        return obj;
+    },
+};
+function createBaseFieldOptions() {
+    return {
+        ctype: 0,
+        packed: false,
+        jstype: 0,
+        lazy: false,
+        unverifiedLazy: false,
+        deprecated: false,
+        weak: false,
+        uninterpretedOption: [],
+    };
+}
+exports.FieldOptions = {
+    fromJSON(object) {
+        return {
+            ctype: isSet(object.ctype) ? fieldOptions_CTypeFromJSON(object.ctype) : 0,
+            packed: isSet(object.packed) ? Boolean(object.packed) : false,
+            jstype: isSet(object.jstype) ? fieldOptions_JSTypeFromJSON(object.jstype) : 0,
+            lazy: isSet(object.lazy) ? Boolean(object.lazy) : false,
+            unverifiedLazy: isSet(object.unverifiedLazy) ? Boolean(object.unverifiedLazy) : false,
+            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
+            weak: isSet(object.weak) ? Boolean(object.weak) : false,
+            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.ctype !== undefined && (obj.ctype = fieldOptions_CTypeToJSON(message.ctype));
+        message.packed !== undefined && (obj.packed = message.packed);
+        message.jstype !== undefined && (obj.jstype = fieldOptions_JSTypeToJSON(message.jstype));
+        message.lazy !== undefined && (obj.lazy = message.lazy);
+        message.unverifiedLazy !== undefined && (obj.unverifiedLazy = message.unverifiedLazy);
+        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
+        message.weak !== undefined && (obj.weak = message.weak);
+        if (message.uninterpretedOption) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+        }
+        else {
+            obj.uninterpretedOption = [];
+        }
+        return obj;
+    },
+};
+function createBaseOneofOptions() {
+    return { uninterpretedOption: [] };
+}
+exports.OneofOptions = {
+    fromJSON(object) {
+        return {
+            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.uninterpretedOption) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+        }
+        else {
+            obj.uninterpretedOption = [];
+        }
+        return obj;
+    },
+};
+function createBaseEnumOptions() {
+    return { allowAlias: false, deprecated: false, uninterpretedOption: [] };
+}
+exports.EnumOptions = {
+    fromJSON(object) {
+        return {
+            allowAlias: isSet(object.allowAlias) ? Boolean(object.allowAlias) : false,
+            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
+            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.allowAlias !== undefined && (obj.allowAlias = message.allowAlias);
+        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
+        if (message.uninterpretedOption) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+        }
+        else {
+            obj.uninterpretedOption = [];
+        }
+        return obj;
+    },
+};
+function createBaseEnumValueOptions() {
+    return { deprecated: false, uninterpretedOption: [] };
+}
+exports.EnumValueOptions = {
+    fromJSON(object) {
+        return {
+            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
+            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
+        if (message.uninterpretedOption) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+        }
+        else {
+            obj.uninterpretedOption = [];
+        }
+        return obj;
+    },
+};
+function createBaseServiceOptions() {
+    return { deprecated: false, uninterpretedOption: [] };
+}
+exports.ServiceOptions = {
+    fromJSON(object) {
+        return {
+            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
+            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
+        if (message.uninterpretedOption) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+        }
+        else {
+            obj.uninterpretedOption = [];
+        }
+        return obj;
+    },
+};
+function createBaseMethodOptions() {
+    return { deprecated: false, idempotencyLevel: 0, uninterpretedOption: [] };
+}
+exports.MethodOptions = {
+    fromJSON(object) {
+        return {
+            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
+            idempotencyLevel: isSet(object.idempotencyLevel)
+                ? methodOptions_IdempotencyLevelFromJSON(object.idempotencyLevel)
+                : 0,
+            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
+        message.idempotencyLevel !== undefined &&
+            (obj.idempotencyLevel = methodOptions_IdempotencyLevelToJSON(message.idempotencyLevel));
+        if (message.uninterpretedOption) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+        }
+        else {
+            obj.uninterpretedOption = [];
+        }
+        return obj;
+    },
+};
+function createBaseUninterpretedOption() {
+    return {
+        name: [],
+        identifierValue: "",
+        positiveIntValue: "0",
+        negativeIntValue: "0",
+        doubleValue: 0,
+        stringValue: Buffer.alloc(0),
+        aggregateValue: "",
+    };
+}
+exports.UninterpretedOption = {
+    fromJSON(object) {
+        return {
+            name: Array.isArray(object?.name) ? object.name.map((e) => exports.UninterpretedOption_NamePart.fromJSON(e)) : [],
+            identifierValue: isSet(object.identifierValue) ? String(object.identifierValue) : "",
+            positiveIntValue: isSet(object.positiveIntValue) ? String(object.positiveIntValue) : "0",
+            negativeIntValue: isSet(object.negativeIntValue) ? String(object.negativeIntValue) : "0",
+            doubleValue: isSet(object.doubleValue) ? Number(object.doubleValue) : 0,
+            stringValue: isSet(object.stringValue) ? Buffer.from(bytesFromBase64(object.stringValue)) : Buffer.alloc(0),
+            aggregateValue: isSet(object.aggregateValue) ? String(object.aggregateValue) : "",
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.name) {
+            obj.name = message.name.map((e) => e ? exports.UninterpretedOption_NamePart.toJSON(e) : undefined);
+        }
+        else {
+            obj.name = [];
+        }
+        message.identifierValue !== undefined && (obj.identifierValue = message.identifierValue);
+        message.positiveIntValue !== undefined && (obj.positiveIntValue = message.positiveIntValue);
+        message.negativeIntValue !== undefined && (obj.negativeIntValue = message.negativeIntValue);
+        message.doubleValue !== undefined && (obj.doubleValue = message.doubleValue);
+        message.stringValue !== undefined &&
+            (obj.stringValue = base64FromBytes(message.stringValue !== undefined ? message.stringValue : Buffer.alloc(0)));
+        message.aggregateValue !== undefined && (obj.aggregateValue = message.aggregateValue);
+        return obj;
+    },
+};
+function createBaseUninterpretedOption_NamePart() {
+    return { namePart: "", isExtension: false };
+}
+exports.UninterpretedOption_NamePart = {
+    fromJSON(object) {
+        return {
+            namePart: isSet(object.namePart) ? String(object.namePart) : "",
+            isExtension: isSet(object.isExtension) ? Boolean(object.isExtension) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.namePart !== undefined && (obj.namePart = message.namePart);
+        message.isExtension !== undefined && (obj.isExtension = message.isExtension);
+        return obj;
+    },
+};
+function createBaseSourceCodeInfo() {
+    return { location: [] };
+}
+exports.SourceCodeInfo = {
+    fromJSON(object) {
+        return {
+            location: Array.isArray(object?.location)
+                ? object.location.map((e) => exports.SourceCodeInfo_Location.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.location) {
+            obj.location = message.location.map((e) => e ? exports.SourceCodeInfo_Location.toJSON(e) : undefined);
+        }
+        else {
+            obj.location = [];
+        }
+        return obj;
+    },
+};
+function createBaseSourceCodeInfo_Location() {
+    return { path: [], span: [], leadingComments: "", trailingComments: "", leadingDetachedComments: [] };
+}
+exports.SourceCodeInfo_Location = {
+    fromJSON(object) {
+        return {
+            path: Array.isArray(object?.path) ? object.path.map((e) => Number(e)) : [],
+            span: Array.isArray(object?.span) ? object.span.map((e) => Number(e)) : [],
+            leadingComments: isSet(object.leadingComments) ? String(object.leadingComments) : "",
+            trailingComments: isSet(object.trailingComments) ? String(object.trailingComments) : "",
+            leadingDetachedComments: Array.isArray(object?.leadingDetachedComments)
+                ? object.leadingDetachedComments.map((e) => String(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.path) {
+            obj.path = message.path.map((e) => Math.round(e));
+        }
+        else {
+            obj.path = [];
+        }
+        if (message.span) {
+            obj.span = message.span.map((e) => Math.round(e));
+        }
+        else {
+            obj.span = [];
+        }
+        message.leadingComments !== undefined && (obj.leadingComments = message.leadingComments);
+        message.trailingComments !== undefined && (obj.trailingComments = message.trailingComments);
+        if (message.leadingDetachedComments) {
+            obj.leadingDetachedComments = message.leadingDetachedComments.map((e) => e);
+        }
+        else {
+            obj.leadingDetachedComments = [];
+        }
+        return obj;
+    },
+};
+function createBaseGeneratedCodeInfo() {
+    return { annotation: [] };
+}
+exports.GeneratedCodeInfo = {
+    fromJSON(object) {
+        return {
+            annotation: Array.isArray(object?.annotation)
+                ? object.annotation.map((e) => exports.GeneratedCodeInfo_Annotation.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.annotation) {
+            obj.annotation = message.annotation.map((e) => e ? exports.GeneratedCodeInfo_Annotation.toJSON(e) : undefined);
+        }
+        else {
+            obj.annotation = [];
+        }
+        return obj;
+    },
+};
+function createBaseGeneratedCodeInfo_Annotation() {
+    return { path: [], sourceFile: "", begin: 0, end: 0 };
+}
+exports.GeneratedCodeInfo_Annotation = {
+    fromJSON(object) {
+        return {
+            path: Array.isArray(object?.path) ? object.path.map((e) => Number(e)) : [],
+            sourceFile: isSet(object.sourceFile) ? String(object.sourceFile) : "",
+            begin: isSet(object.begin) ? Number(object.begin) : 0,
+            end: isSet(object.end) ? Number(object.end) : 0,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.path) {
+            obj.path = message.path.map((e) => Math.round(e));
+        }
+        else {
+            obj.path = [];
+        }
+        message.sourceFile !== undefined && (obj.sourceFile = message.sourceFile);
+        message.begin !== undefined && (obj.begin = Math.round(message.begin));
+        message.end !== undefined && (obj.end = Math.round(message.end));
+        return obj;
+    },
+};
// Resolve the global object across environments (browser, worker, Node).
// NOTE(review): `var globalThis` shadows the native ES2020 `globalThis`
// binding in this module scope, so inside the IIFE `typeof globalThis` sees
// the still-unassigned var and the first branch can never return the native
// global — resolution always falls through to self/window/global. Harmless in
// practice (those cover all supported runtimes), but the first check is
// effectively dead code; confirm before relying on it.
var globalThis = (() => {
    if (typeof globalThis !== "undefined") {
        return globalThis;
    }
    if (typeof self !== "undefined") {
        return self;
    }
    if (typeof window !== "undefined") {
        return window;
    }
    if (typeof global !== "undefined") {
        return global;
    }
    throw "Unable to locate global object";
})();
/**
 * Decode a base64 string to a Uint8Array.
 * Prefers Node's Buffer when available; otherwise falls back to atob.
 */
function bytesFromBase64(b64) {
    if (globalThis.Buffer) {
        return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
    }
    // atob yields a binary string (all code units < 256), so mapping each
    // character's code unit reproduces the original bytes.
    const bin = globalThis.atob(b64);
    return Uint8Array.from(bin, (ch) => ch.charCodeAt(0));
}
/**
 * Encode a byte array (Uint8Array/Buffer) as a base64 string.
 * Prefers Node's Buffer when available; otherwise falls back to btoa.
 */
function base64FromBytes(arr) {
    if (globalThis.Buffer) {
        return globalThis.Buffer.from(arr).toString("base64");
    }
    const chars = [];
    for (const byte of arr) {
        chars.push(String.fromCharCode(byte));
    }
    return globalThis.btoa(chars.join(""));
}
/** True when `value` is neither null nor undefined (loose != null covers both). */
function isSet(value) {
    return value != null;
}
diff --git a/node_modules/sigstore/dist/types/sigstore/__generated__/google/protobuf/timestamp.d.ts b/node_modules/sigstore/dist/types/sigstore/__generated__/google/protobuf/timestamp.d.ts
new file mode 100644
index 0000000000000..1ab812b4a9407
--- /dev/null
+++ b/node_modules/sigstore/dist/types/sigstore/__generated__/google/protobuf/timestamp.d.ts
@@ -0,0 +1,110 @@
+/**
+ * A Timestamp represents a point in time independent of any time zone or local
+ * calendar, encoded as a count of seconds and fractions of seconds at
+ * nanosecond resolution. The count is relative to an epoch at UTC midnight on
+ * January 1, 1970, in the proleptic Gregorian calendar which extends the
+ * Gregorian calendar backwards to year one.
+ *
+ * All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap
+ * second table is needed for interpretation, using a [24-hour linear
+ * smear](https://developers.google.com/time/smear).
+ *
+ * The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By
+ * restricting to that range, we ensure that we can convert to and from [RFC
+ * 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings.
+ *
+ * # Examples
+ *
+ * Example 1: Compute Timestamp from POSIX `time()`.
+ *
+ *     Timestamp timestamp;
+ *     timestamp.set_seconds(time(NULL));
+ *     timestamp.set_nanos(0);
+ *
+ * Example 2: Compute Timestamp from POSIX `gettimeofday()`.
+ *
+ *     struct timeval tv;
+ *     gettimeofday(&tv, NULL);
+ *
+ *     Timestamp timestamp;
+ *     timestamp.set_seconds(tv.tv_sec);
+ *     timestamp.set_nanos(tv.tv_usec * 1000);
+ *
+ * Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`.
+ *
+ *     FILETIME ft;
+ *     GetSystemTimeAsFileTime(&ft);
+ *     UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime;
+ *
+ *     // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z
+ *     // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z.
+ *     Timestamp timestamp;
+ *     timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL));
+ *     timestamp.set_nanos((INT32) ((ticks % 10000000) * 100));
+ *
+ * Example 4: Compute Timestamp from Java `System.currentTimeMillis()`.
+ *
+ *     long millis = System.currentTimeMillis();
+ *
+ *     Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000)
+ *         .setNanos((int) ((millis % 1000) * 1000000)).build();
+ *
+ * Example 5: Compute Timestamp from Java `Instant.now()`.
+ *
+ *     Instant now = Instant.now();
+ *
+ *     Timestamp timestamp =
+ *         Timestamp.newBuilder().setSeconds(now.getEpochSecond())
+ *             .setNanos(now.getNano()).build();
+ *
+ * Example 6: Compute Timestamp from current time in Python.
+ *
+ *     timestamp = Timestamp()
+ *     timestamp.GetCurrentTime()
+ *
+ * # JSON Mapping
+ *
+ * In JSON format, the Timestamp type is encoded as a string in the
+ * [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the
+ * format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z"
+ * where {year} is always expressed using four digits while {month}, {day},
+ * {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional
+ * seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution),
+ * are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone
+ * is required. A proto3 JSON serializer should always use UTC (as indicated by
+ * "Z") when printing the Timestamp type and a proto3 JSON parser should be
+ * able to accept both UTC and other timezones (as indicated by an offset).
+ *
+ * For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past
+ * 01:30 UTC on January 15, 2017.
+ *
+ * In JavaScript, one can convert a Date object to this format using the
+ * standard
+ * [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString)
+ * method. In Python, a standard `datetime.datetime` object can be converted
+ * to this format using
+ * [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with
+ * the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use
+ * the Joda Time's [`ISODateTimeFormat.dateTime()`](
+ * http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D
+ * ) to obtain a formatter capable of generating timestamps in this format.
+ */
export interface Timestamp {
    /**
     * Represents seconds of UTC time since Unix epoch
     * 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to
     * 9999-12-31T23:59:59Z inclusive.
     *
     * NOTE(review): typed as `string` (the companion .js defaults it to "0")
     * — presumably because proto int64 values can exceed
     * Number.MAX_SAFE_INTEGER; confirm against the generator's int64 setting.
     */
    seconds: string;
    /**
     * Non-negative fractions of a second at nanosecond resolution. Negative
     * second values with fractions must still have non-negative nanos values
     * that count forward in time. Must be from 0 to 999,999,999
     * inclusive.
     */
    nanos: number;
}
/** JSON codec companion for the Timestamp message (see timestamp.js). */
export declare const Timestamp: {
    fromJSON(object: any): Timestamp;
    toJSON(message: Timestamp): unknown;
};
diff --git a/node_modules/sigstore/dist/types/sigstore/__generated__/google/protobuf/timestamp.js b/node_modules/sigstore/dist/types/sigstore/__generated__/google/protobuf/timestamp.js
new file mode 100644
index 0000000000000..159135fe87172
--- /dev/null
+++ b/node_modules/sigstore/dist/types/sigstore/__generated__/google/protobuf/timestamp.js
@@ -0,0 +1,24 @@
+"use strict";
+/* eslint-disable */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Timestamp = void 0;
+function createBaseTimestamp() {
+    return { seconds: "0", nanos: 0 };
+}
+exports.Timestamp = {
+    fromJSON(object) {
+        return {
+            seconds: isSet(object.seconds) ? String(object.seconds) : "0",
+            nanos: isSet(object.nanos) ? Number(object.nanos) : 0,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.seconds !== undefined && (obj.seconds = message.seconds);
+        message.nanos !== undefined && (obj.nanos = Math.round(message.nanos));
+        return obj;
+    },
+};
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_bundle.d.ts b/node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_bundle.d.ts
new file mode 100644
index 0000000000000..51f748f459130
--- /dev/null
+++ b/node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_bundle.d.ts
@@ -0,0 +1,72 @@
+import { Envelope } from "./envelope";
+import { MessageSignature, PublicKeyIdentifier, RFC3161SignedTimestamp, X509CertificateChain } from "./sigstore_common";
+import { TransparencyLogEntry } from "./sigstore_rekor";
+/**
+ * Various timestamped counter signatures over the artifacts signature.
+ * Currently only RFC3161 signatures are provided. More formats may be added
+ * in the future.
+ */
+export interface TimestampVerificationData {
+    /**
+     * A list of RFC3161 signed timestamps provided by the user.
+     * This can be used when the entry has not been stored on a
+     * transparency log, or in conjunction for a stronger trust model.
+     * Clients MUST verify the hashed message in the message imprint
+     * against the signature in the bundle.
+     */
+    rfc3161Timestamps: RFC3161SignedTimestamp[];
+}
+/**
+ * VerificationMaterial captures details on the materials used to verify
+ * signatures.
+ */
+export interface VerificationMaterial {
+    content?: {
+        $case: "publicKey";
+        publicKey: PublicKeyIdentifier;
+    } | {
+        $case: "x509CertificateChain";
+        x509CertificateChain: X509CertificateChain;
+    };
+    /**
+     * This is the inclusion promise and/or proof, where
+     * the timestamp is coming from the transparency log.
+     */
+    tlogEntries: TransparencyLogEntry[];
+    /** Timestamp verification data, over the artifact's signature. */
+    timestampVerificationData: TimestampVerificationData | undefined;
+}
+export interface Bundle {
+    /**
+     * MUST be application/vnd.dev.sigstore.bundle+json;version=0.1
+     * when encoded as JSON.
+     */
+    mediaType: string;
+    /**
+     * When a signer is identified by a X.509 certificate, a verifier MUST
+     * verify that the signature was computed at the time the certificate
+     * was valid as described in the Sigstore client spec: "Verification
+     * using a Bundle".
+     * <https://docs.google.com/document/d/1kbhK2qyPPk8SLavHzYSDM8-Ueul9_oxIMVFuWMWKz0E/edit#heading=h.x8bduppe89ln>
+     */
+    verificationMaterial: VerificationMaterial | undefined;
+    content?: {
+        $case: "messageSignature";
+        messageSignature: MessageSignature;
+    } | {
+        $case: "dsseEnvelope";
+        dsseEnvelope: Envelope;
+    };
+}
+export declare const TimestampVerificationData: {
+    fromJSON(object: any): TimestampVerificationData;
+    toJSON(message: TimestampVerificationData): unknown;
+};
+export declare const VerificationMaterial: {
+    fromJSON(object: any): VerificationMaterial;
+    toJSON(message: VerificationMaterial): unknown;
+};
+export declare const Bundle: {
+    fromJSON(object: any): Bundle;
+    toJSON(message: Bundle): unknown;
+};
diff --git a/node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_bundle.js b/node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_bundle.js
new file mode 100644
index 0000000000000..1ef3e1b3356b7
--- /dev/null
+++ b/node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_bundle.js
@@ -0,0 +1,106 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Bundle = exports.VerificationMaterial = exports.TimestampVerificationData = void 0;
+/* eslint-disable */
+const envelope_1 = require("./envelope");
+const sigstore_common_1 = require("./sigstore_common");
+const sigstore_rekor_1 = require("./sigstore_rekor");
+function createBaseTimestampVerificationData() {
+    return { rfc3161Timestamps: [] };
+}
+exports.TimestampVerificationData = {
+    fromJSON(object) {
+        return {
+            rfc3161Timestamps: Array.isArray(object?.rfc3161Timestamps)
+                ? object.rfc3161Timestamps.map((e) => sigstore_common_1.RFC3161SignedTimestamp.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.rfc3161Timestamps) {
+            obj.rfc3161Timestamps = message.rfc3161Timestamps.map((e) => e ? sigstore_common_1.RFC3161SignedTimestamp.toJSON(e) : undefined);
+        }
+        else {
+            obj.rfc3161Timestamps = [];
+        }
+        return obj;
+    },
+};
+function createBaseVerificationMaterial() {
+    return { content: undefined, tlogEntries: [], timestampVerificationData: undefined };
+}
+exports.VerificationMaterial = {
+    fromJSON(object) {
+        return {
+            content: isSet(object.publicKey)
+                ? { $case: "publicKey", publicKey: sigstore_common_1.PublicKeyIdentifier.fromJSON(object.publicKey) }
+                : isSet(object.x509CertificateChain)
+                    ? {
+                        $case: "x509CertificateChain",
+                        x509CertificateChain: sigstore_common_1.X509CertificateChain.fromJSON(object.x509CertificateChain),
+                    }
+                    : undefined,
+            tlogEntries: Array.isArray(object?.tlogEntries)
+                ? object.tlogEntries.map((e) => sigstore_rekor_1.TransparencyLogEntry.fromJSON(e))
+                : [],
+            timestampVerificationData: isSet(object.timestampVerificationData)
+                ? exports.TimestampVerificationData.fromJSON(object.timestampVerificationData)
+                : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.content?.$case === "publicKey" &&
+            (obj.publicKey = message.content?.publicKey ? sigstore_common_1.PublicKeyIdentifier.toJSON(message.content?.publicKey) : undefined);
+        message.content?.$case === "x509CertificateChain" &&
+            (obj.x509CertificateChain = message.content?.x509CertificateChain
+                ? sigstore_common_1.X509CertificateChain.toJSON(message.content?.x509CertificateChain)
+                : undefined);
+        if (message.tlogEntries) {
+            obj.tlogEntries = message.tlogEntries.map((e) => e ? sigstore_rekor_1.TransparencyLogEntry.toJSON(e) : undefined);
+        }
+        else {
+            obj.tlogEntries = [];
+        }
+        message.timestampVerificationData !== undefined &&
+            (obj.timestampVerificationData = message.timestampVerificationData
+                ? exports.TimestampVerificationData.toJSON(message.timestampVerificationData)
+                : undefined);
+        return obj;
+    },
+};
+function createBaseBundle() {
+    return { mediaType: "", verificationMaterial: undefined, content: undefined };
+}
+exports.Bundle = {
+    fromJSON(object) {
+        return {
+            mediaType: isSet(object.mediaType) ? String(object.mediaType) : "",
+            verificationMaterial: isSet(object.verificationMaterial)
+                ? exports.VerificationMaterial.fromJSON(object.verificationMaterial)
+                : undefined,
+            content: isSet(object.messageSignature)
+                ? { $case: "messageSignature", messageSignature: sigstore_common_1.MessageSignature.fromJSON(object.messageSignature) }
+                : isSet(object.dsseEnvelope)
+                    ? { $case: "dsseEnvelope", dsseEnvelope: envelope_1.Envelope.fromJSON(object.dsseEnvelope) }
+                    : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.mediaType !== undefined && (obj.mediaType = message.mediaType);
+        message.verificationMaterial !== undefined && (obj.verificationMaterial = message.verificationMaterial
+            ? exports.VerificationMaterial.toJSON(message.verificationMaterial)
+            : undefined);
+        message.content?.$case === "messageSignature" && (obj.messageSignature = message.content?.messageSignature
+            ? sigstore_common_1.MessageSignature.toJSON(message.content?.messageSignature)
+            : undefined);
+        message.content?.$case === "dsseEnvelope" &&
+            (obj.dsseEnvelope = message.content?.dsseEnvelope ? envelope_1.Envelope.toJSON(message.content?.dsseEnvelope) : undefined);
+        return obj;
+    },
+};
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_common.d.ts b/node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_common.d.ts
new file mode 100644
index 0000000000000..0d8c2d5ebde7d
--- /dev/null
+++ b/node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_common.d.ts
@@ -0,0 +1,228 @@
+/// <reference types="node" />
+/**
+ * Only a subset of the secure hash standard algorithms are supported.
+ * See <https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.180-4.pdf> for more
+ * details.
+ * UNSPECIFIED SHOULD not be used, primary reason for inclusion is to force
+ * any proto JSON serialization to emit the used hash algorithm, as default
+ * option is to *omit* the default value of an enum (which is the first
+ * value, represented by '0'.
+ */
+export declare enum HashAlgorithm {
+    HASH_ALGORITHM_UNSPECIFIED = 0,
+    SHA2_256 = 1
+}
+export declare function hashAlgorithmFromJSON(object: any): HashAlgorithm;
+export declare function hashAlgorithmToJSON(object: HashAlgorithm): string;
+/**
+ * Details of a specific public key, capturing the the key encoding method,
+ * and signature algorithm.
+ * To avoid the possibility of contradicting formats such as PKCS1 with
+ * ED25519 the valid permutations are listed as a linear set instead of a
+ * cartesian set (i.e one combined variable instead of two, one for encoding
+ * and one for the signature algorithm).
+ */
+export declare enum PublicKeyDetails {
+    PUBLIC_KEY_DETAILS_UNSPECIFIED = 0,
+    /** PKCS1_RSA_PKCS1V5 - RSA */
+    PKCS1_RSA_PKCS1V5 = 1,
+    /** PKCS1_RSA_PSS - See RFC8017 */
+    PKCS1_RSA_PSS = 2,
+    PKIX_RSA_PKCS1V5 = 3,
+    PKIX_RSA_PSS = 4,
+    /** PKIX_ECDSA_P256_SHA_256 - ECDSA */
+    PKIX_ECDSA_P256_SHA_256 = 5,
+    /** PKIX_ECDSA_P256_HMAC_SHA_256 - See RFC6979 */
+    PKIX_ECDSA_P256_HMAC_SHA_256 = 6,
+    /** PKIX_ED25519 - Ed 25519 */
+    PKIX_ED25519 = 7
+}
+export declare function publicKeyDetailsFromJSON(object: any): PublicKeyDetails;
+export declare function publicKeyDetailsToJSON(object: PublicKeyDetails): string;
+export declare enum SubjectAlternativeNameType {
+    SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED = 0,
+    EMAIL = 1,
+    URI = 2,
+    /**
+     * OTHER_NAME - OID 1.3.6.1.4.1.57264.1.7
+     * See https://github.com/sigstore/fulcio/blob/main/docs/oid-info.md#1361415726417--othername-san
+     * for more details.
+     */
+    OTHER_NAME = 3
+}
+export declare function subjectAlternativeNameTypeFromJSON(object: any): SubjectAlternativeNameType;
+export declare function subjectAlternativeNameTypeToJSON(object: SubjectAlternativeNameType): string;
+/**
+ * HashOutput captures a digest of a 'message' (generic octet sequence)
+ * and the corresponding hash algorithm used.
+ */
+export interface HashOutput {
+    algorithm: HashAlgorithm;
+    /**
+     * This is the raw octets of the message digest as computed by
+     * the hash algorithm.
+     */
+    digest: Buffer;
+}
+/** MessageSignature stores the computed signature over a message. */
+export interface MessageSignature {
+    /** Message digest can be used to identify the artifact. */
+    messageDigest: HashOutput | undefined;
+    /**
+     * The raw bytes as returned from the signature algorithm.
+     * The signature algorithm (and so the format of the signature bytes)
+     * are determined by the contents of the 'verification_material',
+     * either a key-pair or a certificate. If using a certificate, the
+     * certificate contains the required information on the signature
+     * algorithm.
+     * When using a key pair, the algorithm MUST be part of the public
+     * key, which MUST be communicated out-of-band.
+     */
+    signature: Buffer;
+}
+/** LogId captures the identity of a transparency log. */
+export interface LogId {
+    /**
+     * The unique id of the log, represented as the SHA-256 hash
+     * of the log's public key, computed over the DER encoding.
+     * <https://www.rfc-editor.org/rfc/rfc6962#section-3.2>
+     */
+    keyId: Buffer;
+}
+/** This message holds a RFC 3161 timestamp. */
+export interface RFC3161SignedTimestamp {
+    /**
+     * Signed timestamp is the DER encoded TimeStampResponse.
+     * See https://www.rfc-editor.org/rfc/rfc3161.html#section-2.4.2
+     */
+    signedTimestamp: Buffer;
+}
+export interface PublicKey {
+    /**
+     * DER-encoded public key, encoding method is specified by the
+     * key_details attribute.
+     */
+    rawBytes?: Buffer | undefined;
+    /** Key encoding and signature algorithm to use for this key. */
+    keyDetails: PublicKeyDetails;
+    /** Optional validity period for this key. */
+    validFor?: TimeRange | undefined;
+}
+/**
+ * PublicKeyIdentifier can be used to identify an (out of band) delivered
+ * key, to verify a signature.
+ */
+export interface PublicKeyIdentifier {
+    /**
+     * Optional unauthenticated hint on which key to use.
+     * The format of the hint must be agreed upon out of band by the
+     * signer and the verifiers, and so is not subject to this
+     * specification.
+     * Example use-case is to specify the public key to use, from a
+     * trusted key-ring.
+     * Implementors are RECOMMENDED to derive the value from the public
+     * key as described in RFC 6962.
+     * See: <https://www.rfc-editor.org/rfc/rfc6962#section-3.2>
+     */
+    hint: string;
+}
+/** An ASN.1 OBJECT IDENTIFIER */
+export interface ObjectIdentifier {
+    id: number[];
+}
+/** An OID and the corresponding (byte) value. */
+export interface ObjectIdentifierValuePair {
+    oid: ObjectIdentifier | undefined;
+    value: Buffer;
+}
+export interface DistinguishedName {
+    organization: string;
+    commonName: string;
+}
+export interface X509Certificate {
+    /** DER-encoded X.509 certificate. */
+    rawBytes: Buffer;
+}
+export interface SubjectAlternativeName {
+    type: SubjectAlternativeNameType;
+    identity?: {
+        $case: "regexp";
+        regexp: string;
+    } | {
+        $case: "value";
+        value: string;
+    };
+}
+/** A chain of X.509 certificates. */
+export interface X509CertificateChain {
+    /**
+     * The chain of certificates, with indices 0 to n.
+     * The first certificate in the array must be the leaf
+     * certificate used for signing. Any intermediate certificates
+     * must be stored as offset 1 to n-1, and the root certificate at
+     * position n.
+     */
+    certificates: X509Certificate[];
+}
+/**
+ * The time range is half-open and does not include the end timestamp,
+ * i.e [start, end).
+ * End is optional to be able to capture a period that has started but
+ * has no known end.
+ */
+export interface TimeRange {
+    start: Date | undefined;
+    end?: Date | undefined;
+}
+export declare const HashOutput: {
+    fromJSON(object: any): HashOutput;
+    toJSON(message: HashOutput): unknown;
+};
+export declare const MessageSignature: {
+    fromJSON(object: any): MessageSignature;
+    toJSON(message: MessageSignature): unknown;
+};
+export declare const LogId: {
+    fromJSON(object: any): LogId;
+    toJSON(message: LogId): unknown;
+};
+export declare const RFC3161SignedTimestamp: {
+    fromJSON(object: any): RFC3161SignedTimestamp;
+    toJSON(message: RFC3161SignedTimestamp): unknown;
+};
+export declare const PublicKey: {
+    fromJSON(object: any): PublicKey;
+    toJSON(message: PublicKey): unknown;
+};
+export declare const PublicKeyIdentifier: {
+    fromJSON(object: any): PublicKeyIdentifier;
+    toJSON(message: PublicKeyIdentifier): unknown;
+};
+export declare const ObjectIdentifier: {
+    fromJSON(object: any): ObjectIdentifier;
+    toJSON(message: ObjectIdentifier): unknown;
+};
+export declare const ObjectIdentifierValuePair: {
+    fromJSON(object: any): ObjectIdentifierValuePair;
+    toJSON(message: ObjectIdentifierValuePair): unknown;
+};
+export declare const DistinguishedName: {
+    fromJSON(object: any): DistinguishedName;
+    toJSON(message: DistinguishedName): unknown;
+};
+export declare const X509Certificate: {
+    fromJSON(object: any): X509Certificate;
+    toJSON(message: X509Certificate): unknown;
+};
+export declare const SubjectAlternativeName: {
+    fromJSON(object: any): SubjectAlternativeName;
+    toJSON(message: SubjectAlternativeName): unknown;
+};
+export declare const X509CertificateChain: {
+    fromJSON(object: any): X509CertificateChain;
+    toJSON(message: X509CertificateChain): unknown;
+};
+export declare const TimeRange: {
+    fromJSON(object: any): TimeRange;
+    toJSON(message: TimeRange): unknown;
+};
diff --git a/node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_common.js b/node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_common.js
new file mode 100644
index 0000000000000..63ace8db580cc
--- /dev/null
+++ b/node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_common.js
@@ -0,0 +1,457 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TimeRange = exports.X509CertificateChain = exports.SubjectAlternativeName = exports.X509Certificate = exports.DistinguishedName = exports.ObjectIdentifierValuePair = exports.ObjectIdentifier = exports.PublicKeyIdentifier = exports.PublicKey = exports.RFC3161SignedTimestamp = exports.LogId = exports.MessageSignature = exports.HashOutput = exports.subjectAlternativeNameTypeToJSON = exports.subjectAlternativeNameTypeFromJSON = exports.SubjectAlternativeNameType = exports.publicKeyDetailsToJSON = exports.publicKeyDetailsFromJSON = exports.PublicKeyDetails = exports.hashAlgorithmToJSON = exports.hashAlgorithmFromJSON = exports.HashAlgorithm = void 0;
+/* eslint-disable */
+const timestamp_1 = require("./google/protobuf/timestamp");
+/**
+ * Only a subset of the secure hash standard algorithms are supported.
+ * See <https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.180-4.pdf> for more
+ * details.
+ * UNSPECIFIED SHOULD not be used, primary reason for inclusion is to force
+ * any proto JSON serialization to emit the used hash algorithm, as default
+ * option is to *omit* the default value of an enum (which is the first
+ * value, represented by '0'.
+ */
+var HashAlgorithm;
+(function (HashAlgorithm) {
+    HashAlgorithm[HashAlgorithm["HASH_ALGORITHM_UNSPECIFIED"] = 0] = "HASH_ALGORITHM_UNSPECIFIED";
+    HashAlgorithm[HashAlgorithm["SHA2_256"] = 1] = "SHA2_256";
+})(HashAlgorithm = exports.HashAlgorithm || (exports.HashAlgorithm = {}));
+function hashAlgorithmFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "HASH_ALGORITHM_UNSPECIFIED":
+            return HashAlgorithm.HASH_ALGORITHM_UNSPECIFIED;
+        case 1:
+        case "SHA2_256":
+            return HashAlgorithm.SHA2_256;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm");
+    }
+}
+exports.hashAlgorithmFromJSON = hashAlgorithmFromJSON;
+function hashAlgorithmToJSON(object) {
+    switch (object) {
+        case HashAlgorithm.HASH_ALGORITHM_UNSPECIFIED:
+            return "HASH_ALGORITHM_UNSPECIFIED";
+        case HashAlgorithm.SHA2_256:
+            return "SHA2_256";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm");
+    }
+}
+exports.hashAlgorithmToJSON = hashAlgorithmToJSON;
+/**
+ * Details of a specific public key, capturing the the key encoding method,
+ * and signature algorithm.
+ * To avoid the possibility of contradicting formats such as PKCS1 with
+ * ED25519 the valid permutations are listed as a linear set instead of a
+ * cartesian set (i.e one combined variable instead of two, one for encoding
+ * and one for the signature algorithm).
+ */
+var PublicKeyDetails;
+(function (PublicKeyDetails) {
+    PublicKeyDetails[PublicKeyDetails["PUBLIC_KEY_DETAILS_UNSPECIFIED"] = 0] = "PUBLIC_KEY_DETAILS_UNSPECIFIED";
+    /** PKCS1_RSA_PKCS1V5 - RSA */
+    PublicKeyDetails[PublicKeyDetails["PKCS1_RSA_PKCS1V5"] = 1] = "PKCS1_RSA_PKCS1V5";
+    /** PKCS1_RSA_PSS - See RFC8017 */
+    PublicKeyDetails[PublicKeyDetails["PKCS1_RSA_PSS"] = 2] = "PKCS1_RSA_PSS";
+    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PKCS1V5"] = 3] = "PKIX_RSA_PKCS1V5";
+    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PSS"] = 4] = "PKIX_RSA_PSS";
+    /** PKIX_ECDSA_P256_SHA_256 - ECDSA */
+    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P256_SHA_256"] = 5] = "PKIX_ECDSA_P256_SHA_256";
+    /** PKIX_ECDSA_P256_HMAC_SHA_256 - See RFC6979 */
+    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P256_HMAC_SHA_256"] = 6] = "PKIX_ECDSA_P256_HMAC_SHA_256";
+    /** PKIX_ED25519 - Ed 25519 */
+    PublicKeyDetails[PublicKeyDetails["PKIX_ED25519"] = 7] = "PKIX_ED25519";
+})(PublicKeyDetails = exports.PublicKeyDetails || (exports.PublicKeyDetails = {}));
+function publicKeyDetailsFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "PUBLIC_KEY_DETAILS_UNSPECIFIED":
+            return PublicKeyDetails.PUBLIC_KEY_DETAILS_UNSPECIFIED;
+        case 1:
+        case "PKCS1_RSA_PKCS1V5":
+            return PublicKeyDetails.PKCS1_RSA_PKCS1V5;
+        case 2:
+        case "PKCS1_RSA_PSS":
+            return PublicKeyDetails.PKCS1_RSA_PSS;
+        case 3:
+        case "PKIX_RSA_PKCS1V5":
+            return PublicKeyDetails.PKIX_RSA_PKCS1V5;
+        case 4:
+        case "PKIX_RSA_PSS":
+            return PublicKeyDetails.PKIX_RSA_PSS;
+        case 5:
+        case "PKIX_ECDSA_P256_SHA_256":
+            return PublicKeyDetails.PKIX_ECDSA_P256_SHA_256;
+        case 6:
+        case "PKIX_ECDSA_P256_HMAC_SHA_256":
+            return PublicKeyDetails.PKIX_ECDSA_P256_HMAC_SHA_256;
+        case 7:
+        case "PKIX_ED25519":
+            return PublicKeyDetails.PKIX_ED25519;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails");
+    }
+}
+exports.publicKeyDetailsFromJSON = publicKeyDetailsFromJSON;
+function publicKeyDetailsToJSON(object) {
+    switch (object) {
+        case PublicKeyDetails.PUBLIC_KEY_DETAILS_UNSPECIFIED:
+            return "PUBLIC_KEY_DETAILS_UNSPECIFIED";
+        case PublicKeyDetails.PKCS1_RSA_PKCS1V5:
+            return "PKCS1_RSA_PKCS1V5";
+        case PublicKeyDetails.PKCS1_RSA_PSS:
+            return "PKCS1_RSA_PSS";
+        case PublicKeyDetails.PKIX_RSA_PKCS1V5:
+            return "PKIX_RSA_PKCS1V5";
+        case PublicKeyDetails.PKIX_RSA_PSS:
+            return "PKIX_RSA_PSS";
+        case PublicKeyDetails.PKIX_ECDSA_P256_SHA_256:
+            return "PKIX_ECDSA_P256_SHA_256";
+        case PublicKeyDetails.PKIX_ECDSA_P256_HMAC_SHA_256:
+            return "PKIX_ECDSA_P256_HMAC_SHA_256";
+        case PublicKeyDetails.PKIX_ED25519:
+            return "PKIX_ED25519";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails");
+    }
+}
+exports.publicKeyDetailsToJSON = publicKeyDetailsToJSON;
+var SubjectAlternativeNameType;
+(function (SubjectAlternativeNameType) {
+    SubjectAlternativeNameType[SubjectAlternativeNameType["SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED"] = 0] = "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED";
+    SubjectAlternativeNameType[SubjectAlternativeNameType["EMAIL"] = 1] = "EMAIL";
+    SubjectAlternativeNameType[SubjectAlternativeNameType["URI"] = 2] = "URI";
+    /**
+     * OTHER_NAME - OID 1.3.6.1.4.1.57264.1.7
+     * See https://github.com/sigstore/fulcio/blob/main/docs/oid-info.md#1361415726417--othername-san
+     * for more details.
+     */
+    SubjectAlternativeNameType[SubjectAlternativeNameType["OTHER_NAME"] = 3] = "OTHER_NAME";
+})(SubjectAlternativeNameType = exports.SubjectAlternativeNameType || (exports.SubjectAlternativeNameType = {}));
+function subjectAlternativeNameTypeFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED":
+            return SubjectAlternativeNameType.SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED;
+        case 1:
+        case "EMAIL":
+            return SubjectAlternativeNameType.EMAIL;
+        case 2:
+        case "URI":
+            return SubjectAlternativeNameType.URI;
+        case 3:
+        case "OTHER_NAME":
+            return SubjectAlternativeNameType.OTHER_NAME;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType");
+    }
+}
+exports.subjectAlternativeNameTypeFromJSON = subjectAlternativeNameTypeFromJSON;
+function subjectAlternativeNameTypeToJSON(object) {
+    switch (object) {
+        case SubjectAlternativeNameType.SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED:
+            return "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED";
+        case SubjectAlternativeNameType.EMAIL:
+            return "EMAIL";
+        case SubjectAlternativeNameType.URI:
+            return "URI";
+        case SubjectAlternativeNameType.OTHER_NAME:
+            return "OTHER_NAME";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType");
+    }
+}
+exports.subjectAlternativeNameTypeToJSON = subjectAlternativeNameTypeToJSON;
+function createBaseHashOutput() {
+    return { algorithm: 0, digest: Buffer.alloc(0) };
+}
+exports.HashOutput = {
+    fromJSON(object) {
+        return {
+            algorithm: isSet(object.algorithm) ? hashAlgorithmFromJSON(object.algorithm) : 0,
+            digest: isSet(object.digest) ? Buffer.from(bytesFromBase64(object.digest)) : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.algorithm !== undefined && (obj.algorithm = hashAlgorithmToJSON(message.algorithm));
+        message.digest !== undefined &&
+            (obj.digest = base64FromBytes(message.digest !== undefined ? message.digest : Buffer.alloc(0)));
+        return obj;
+    },
+};
+function createBaseMessageSignature() {
+    return { messageDigest: undefined, signature: Buffer.alloc(0) };
+}
+exports.MessageSignature = {
+    fromJSON(object) {
+        return {
+            messageDigest: isSet(object.messageDigest) ? exports.HashOutput.fromJSON(object.messageDigest) : undefined,
+            signature: isSet(object.signature) ? Buffer.from(bytesFromBase64(object.signature)) : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.messageDigest !== undefined &&
+            (obj.messageDigest = message.messageDigest ? exports.HashOutput.toJSON(message.messageDigest) : undefined);
+        message.signature !== undefined &&
+            (obj.signature = base64FromBytes(message.signature !== undefined ? message.signature : Buffer.alloc(0)));
+        return obj;
+    },
+};
+function createBaseLogId() {
+    return { keyId: Buffer.alloc(0) };
+}
+exports.LogId = {
+    fromJSON(object) {
+        return { keyId: isSet(object.keyId) ? Buffer.from(bytesFromBase64(object.keyId)) : Buffer.alloc(0) };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.keyId !== undefined &&
+            (obj.keyId = base64FromBytes(message.keyId !== undefined ? message.keyId : Buffer.alloc(0)));
+        return obj;
+    },
+};
+function createBaseRFC3161SignedTimestamp() {
+    return { signedTimestamp: Buffer.alloc(0) };
+}
+exports.RFC3161SignedTimestamp = {
+    fromJSON(object) {
+        return {
+            signedTimestamp: isSet(object.signedTimestamp)
+                ? Buffer.from(bytesFromBase64(object.signedTimestamp))
+                : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.signedTimestamp !== undefined &&
+            (obj.signedTimestamp = base64FromBytes(message.signedTimestamp !== undefined ? message.signedTimestamp : Buffer.alloc(0)));
+        return obj;
+    },
+};
+function createBasePublicKey() {
+    return { rawBytes: undefined, keyDetails: 0, validFor: undefined };
+}
+exports.PublicKey = {
+    fromJSON(object) {
+        return {
+            rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : undefined,
+            keyDetails: isSet(object.keyDetails) ? publicKeyDetailsFromJSON(object.keyDetails) : 0,
+            validFor: isSet(object.validFor) ? exports.TimeRange.fromJSON(object.validFor) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.rawBytes !== undefined &&
+            (obj.rawBytes = message.rawBytes !== undefined ? base64FromBytes(message.rawBytes) : undefined);
+        message.keyDetails !== undefined && (obj.keyDetails = publicKeyDetailsToJSON(message.keyDetails));
+        message.validFor !== undefined &&
+            (obj.validFor = message.validFor ? exports.TimeRange.toJSON(message.validFor) : undefined);
+        return obj;
+    },
+};
+function createBasePublicKeyIdentifier() {
+    return { hint: "" };
+}
+exports.PublicKeyIdentifier = {
+    fromJSON(object) {
+        return { hint: isSet(object.hint) ? String(object.hint) : "" };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.hint !== undefined && (obj.hint = message.hint);
+        return obj;
+    },
+};
+function createBaseObjectIdentifier() {
+    return { id: [] };
+}
+exports.ObjectIdentifier = {
+    fromJSON(object) {
+        return { id: Array.isArray(object?.id) ? object.id.map((e) => Number(e)) : [] };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.id) {
+            obj.id = message.id.map((e) => Math.round(e));
+        }
+        else {
+            obj.id = [];
+        }
+        return obj;
+    },
+};
+function createBaseObjectIdentifierValuePair() {
+    return { oid: undefined, value: Buffer.alloc(0) };
+}
+exports.ObjectIdentifierValuePair = {
+    fromJSON(object) {
+        return {
+            oid: isSet(object.oid) ? exports.ObjectIdentifier.fromJSON(object.oid) : undefined,
+            value: isSet(object.value) ? Buffer.from(bytesFromBase64(object.value)) : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.oid !== undefined && (obj.oid = message.oid ? exports.ObjectIdentifier.toJSON(message.oid) : undefined);
+        message.value !== undefined &&
+            (obj.value = base64FromBytes(message.value !== undefined ? message.value : Buffer.alloc(0)));
+        return obj;
+    },
+};
+function createBaseDistinguishedName() {
+    return { organization: "", commonName: "" };
+}
+exports.DistinguishedName = {
+    fromJSON(object) {
+        return {
+            organization: isSet(object.organization) ? String(object.organization) : "",
+            commonName: isSet(object.commonName) ? String(object.commonName) : "",
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.organization !== undefined && (obj.organization = message.organization);
+        message.commonName !== undefined && (obj.commonName = message.commonName);
+        return obj;
+    },
+};
+function createBaseX509Certificate() {
+    return { rawBytes: Buffer.alloc(0) };
+}
+exports.X509Certificate = {
+    fromJSON(object) {
+        return { rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : Buffer.alloc(0) };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.rawBytes !== undefined &&
+            (obj.rawBytes = base64FromBytes(message.rawBytes !== undefined ? message.rawBytes : Buffer.alloc(0)));
+        return obj;
+    },
+};
+function createBaseSubjectAlternativeName() {
+    return { type: 0, identity: undefined };
+}
+exports.SubjectAlternativeName = {
+    fromJSON(object) {
+        return {
+            type: isSet(object.type) ? subjectAlternativeNameTypeFromJSON(object.type) : 0,
+            identity: isSet(object.regexp)
+                ? { $case: "regexp", regexp: String(object.regexp) }
+                : isSet(object.value)
+                    ? { $case: "value", value: String(object.value) }
+                    : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.type !== undefined && (obj.type = subjectAlternativeNameTypeToJSON(message.type));
+        message.identity?.$case === "regexp" && (obj.regexp = message.identity?.regexp);
+        message.identity?.$case === "value" && (obj.value = message.identity?.value);
+        return obj;
+    },
+};
+function createBaseX509CertificateChain() {
+    return { certificates: [] };
+}
+exports.X509CertificateChain = {
+    fromJSON(object) {
+        return {
+            certificates: Array.isArray(object?.certificates)
+                ? object.certificates.map((e) => exports.X509Certificate.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.certificates) {
+            obj.certificates = message.certificates.map((e) => e ? exports.X509Certificate.toJSON(e) : undefined);
+        }
+        else {
+            obj.certificates = [];
+        }
+        return obj;
+    },
+};
+function createBaseTimeRange() {
+    return { start: undefined, end: undefined };
+}
+exports.TimeRange = {
+    fromJSON(object) {
+        return {
+            start: isSet(object.start) ? fromJsonTimestamp(object.start) : undefined,
+            end: isSet(object.end) ? fromJsonTimestamp(object.end) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.start !== undefined && (obj.start = message.start.toISOString());
+        message.end !== undefined && (obj.end = message.end.toISOString());
+        return obj;
+    },
+};
+var globalThis = (() => {
+    if (typeof globalThis !== "undefined") {
+        return globalThis;
+    }
+    if (typeof self !== "undefined") {
+        return self;
+    }
+    if (typeof window !== "undefined") {
+        return window;
+    }
+    if (typeof global !== "undefined") {
+        return global;
+    }
+    throw "Unable to locate global object";
+})();
+function bytesFromBase64(b64) {
+    if (globalThis.Buffer) {
+        return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
+    }
+    else {
+        const bin = globalThis.atob(b64);
+        const arr = new Uint8Array(bin.length);
+        for (let i = 0; i < bin.length; ++i) {
+            arr[i] = bin.charCodeAt(i);
+        }
+        return arr;
+    }
+}
+function base64FromBytes(arr) {
+    if (globalThis.Buffer) {
+        return globalThis.Buffer.from(arr).toString("base64");
+    }
+    else {
+        const bin = [];
+        arr.forEach((byte) => {
+            bin.push(String.fromCharCode(byte));
+        });
+        return globalThis.btoa(bin.join(""));
+    }
+}
+function fromTimestamp(t) {
+    let millis = Number(t.seconds) * 1000;
+    millis += t.nanos / 1000000;
+    return new Date(millis);
+}
+function fromJsonTimestamp(o) {
+    if (o instanceof Date) {
+        return o;
+    }
+    else if (typeof o === "string") {
+        return new Date(o);
+    }
+    else {
+        return fromTimestamp(timestamp_1.Timestamp.fromJSON(o));
+    }
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_rekor.d.ts b/node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_rekor.d.ts
new file mode 100644
index 0000000000000..9e33bb80e2a86
--- /dev/null
+++ b/node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_rekor.d.ts
@@ -0,0 +1,129 @@
+/// <reference types="node" />
+import { LogId } from "./sigstore_common";
+/** KindVersion contains the entry's kind and api version. */
+export interface KindVersion {
+    /**
+     * Kind is the type of entry being stored in the log.
+     * See here for a list: https://github.com/sigstore/rekor/tree/main/pkg/types
+     */
+    kind: string;
+    /** The specific api version of the type. */
+    version: string;
+}
+/**
+ * The checkpoint contains a signature of the tree head (root hash),
+ * size of the tree, the transparency log's unique identifier (log ID),
+ * hostname and the current time.
+ * The result is a string, the format is described here
+ * https://github.com/transparency-dev/formats/blob/main/log/README.md
+ * The details are here https://github.com/sigstore/rekor/blob/a6e58f72b6b18cc06cefe61808efd562b9726330/pkg/util/signed_note.go#L114
+ * The signature has the same format as
+ * InclusionPromise.signed_entry_timestamp. See below for more details.
+ */
+export interface Checkpoint {
+    envelope: string;
+}
+/**
+ * InclusionProof is the proof returned from the transparency log. Can
+ * be used for online verification against the log.
+ */
+export interface InclusionProof {
+    /** The index of the entry in the log. */
+    logIndex: string;
+    /**
+     * The hash digest stored at the root of the merkle tree at the time
+     * the proof was generated.
+     */
+    rootHash: Buffer;
+    /** The size of the merkle tree at the time the proof was generated. */
+    treeSize: string;
+    /**
+     * A list of hashes required to compute the inclusion proof, sorted
+     * in order from leaf to root.
+     * Note that leaf and root hashes are not included.
+     * The root hash is available separately in this message, and the
+     * leaf hash should be calculated by the client.
+     */
+    hashes: Buffer[];
+    /**
+     * Signature of the tree head, as of the time of this proof was
+     * generated. See above info on 'Checkpoint' for more details.
+     */
+    checkpoint: Checkpoint | undefined;
+}
+/**
+ * The inclusion promise is calculated by Rekor. It's calculated as a
+ * signature over a canonical JSON serialization of the persisted entry, the
+ * log ID, log index and the integration timestamp.
+ * See https://github.com/sigstore/rekor/blob/a6e58f72b6b18cc06cefe61808efd562b9726330/pkg/api/entries.go#L54
+ * The format of the signature depends on the transparency log's public key.
+ * If the signature algorithm requires a hash function and/or a signature
+ * scheme (e.g. RSA) those has to be retrieved out-of-band from the log's
+ * operators, together with the public key.
+ * This is used to verify the integration timestamp's value and that the log
+ * has promised to include the entry.
+ */
+export interface InclusionPromise {
+    signedEntryTimestamp: Buffer;
+}
+/**
+ * TransparencyLogEntry captures all the details required from Rekor to
+ * reconstruct an entry, given that the payload is provided via other means.
+ * This type can easily be created from the existing response from Rekor.
+ * Future iterations could rely on Rekor returning the minimal set of
+ * attributes (excluding the payload) that are required for verifying the
+ * inclusion promise. The inclusion promise (called SignedEntryTimestamp in
+ * the response from Rekor) is similar to a Signed Certificate Timestamp
+ * as described here https://www.rfc-editor.org/rfc/rfc9162#name-signed-certificate-timestam.
+ */
+export interface TransparencyLogEntry {
+    /** The index of the entry in the log. */
+    logIndex: string;
+    /** The unique identifier of the log. */
+    logId: LogId | undefined;
+    /**
+     * The kind (type) and version of the object associated with this
+     * entry. These values are required to construct the entry during
+     * verification.
+     */
+    kindVersion: KindVersion | undefined;
+    /** The UNIX timestamp from the log when the entry was persisted. */
+    integratedTime: string;
+    /** The inclusion promise/signed entry timestamp from the log. */
+    inclusionPromise: InclusionPromise | undefined;
+    /**
+     * The inclusion proof can be used for online verification that the
+     * entry was appended to the log, and that the log has not been
+     * altered.
+     */
+    inclusionProof: InclusionProof | undefined;
+    /**
+     * The canonicalized Rekor entry body, used for SET verification. This
+     * is the same as the body returned by Rekor. It's included here for
+     * cases where the client cannot deterministically reconstruct the
+     * bundle from the other fields. Clients MUST verify that the signature
+     * referenced in the canonicalized_body matches the signature provided
+     * in the bundle content.
+     */
+    canonicalizedBody: Buffer;
+}
+export declare const KindVersion: {
+    fromJSON(object: any): KindVersion;
+    toJSON(message: KindVersion): unknown;
+};
+export declare const Checkpoint: {
+    fromJSON(object: any): Checkpoint;
+    toJSON(message: Checkpoint): unknown;
+};
+export declare const InclusionProof: {
+    fromJSON(object: any): InclusionProof;
+    toJSON(message: InclusionProof): unknown;
+};
+export declare const InclusionPromise: {
+    fromJSON(object: any): InclusionPromise;
+    toJSON(message: InclusionPromise): unknown;
+};
+export declare const TransparencyLogEntry: {
+    fromJSON(object: any): TransparencyLogEntry;
+    toJSON(message: TransparencyLogEntry): unknown;
+};
diff --git a/node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_rekor.js b/node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_rekor.js
new file mode 100644
index 0000000000000..bffc7700edbec
--- /dev/null
+++ b/node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_rekor.js
@@ -0,0 +1,167 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TransparencyLogEntry = exports.InclusionPromise = exports.InclusionProof = exports.Checkpoint = exports.KindVersion = void 0;
+/* eslint-disable */
+const sigstore_common_1 = require("./sigstore_common");
+function createBaseKindVersion() {
+    return { kind: "", version: "" };
+}
+exports.KindVersion = {
+    fromJSON(object) {
+        return {
+            kind: isSet(object.kind) ? String(object.kind) : "",
+            version: isSet(object.version) ? String(object.version) : "",
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.kind !== undefined && (obj.kind = message.kind);
+        message.version !== undefined && (obj.version = message.version);
+        return obj;
+    },
+};
+function createBaseCheckpoint() {
+    return { envelope: "" };
+}
+exports.Checkpoint = {
+    fromJSON(object) {
+        return { envelope: isSet(object.envelope) ? String(object.envelope) : "" };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.envelope !== undefined && (obj.envelope = message.envelope);
+        return obj;
+    },
+};
+function createBaseInclusionProof() {
+    return { logIndex: "0", rootHash: Buffer.alloc(0), treeSize: "0", hashes: [], checkpoint: undefined };
+}
+exports.InclusionProof = {
+    fromJSON(object) {
+        return {
+            logIndex: isSet(object.logIndex) ? String(object.logIndex) : "0",
+            rootHash: isSet(object.rootHash) ? Buffer.from(bytesFromBase64(object.rootHash)) : Buffer.alloc(0),
+            treeSize: isSet(object.treeSize) ? String(object.treeSize) : "0",
+            hashes: Array.isArray(object?.hashes) ? object.hashes.map((e) => Buffer.from(bytesFromBase64(e))) : [],
+            checkpoint: isSet(object.checkpoint) ? exports.Checkpoint.fromJSON(object.checkpoint) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.logIndex !== undefined && (obj.logIndex = message.logIndex);
+        message.rootHash !== undefined &&
+            (obj.rootHash = base64FromBytes(message.rootHash !== undefined ? message.rootHash : Buffer.alloc(0)));
+        message.treeSize !== undefined && (obj.treeSize = message.treeSize);
+        if (message.hashes) {
+            obj.hashes = message.hashes.map((e) => base64FromBytes(e !== undefined ? e : Buffer.alloc(0)));
+        }
+        else {
+            obj.hashes = [];
+        }
+        message.checkpoint !== undefined &&
+            (obj.checkpoint = message.checkpoint ? exports.Checkpoint.toJSON(message.checkpoint) : undefined);
+        return obj;
+    },
+};
+function createBaseInclusionPromise() {
+    return { signedEntryTimestamp: Buffer.alloc(0) };
+}
+exports.InclusionPromise = {
+    fromJSON(object) {
+        return {
+            signedEntryTimestamp: isSet(object.signedEntryTimestamp)
+                ? Buffer.from(bytesFromBase64(object.signedEntryTimestamp))
+                : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.signedEntryTimestamp !== undefined &&
+            (obj.signedEntryTimestamp = base64FromBytes(message.signedEntryTimestamp !== undefined ? message.signedEntryTimestamp : Buffer.alloc(0)));
+        return obj;
+    },
+};
+function createBaseTransparencyLogEntry() {
+    return {
+        logIndex: "0",
+        logId: undefined,
+        kindVersion: undefined,
+        integratedTime: "0",
+        inclusionPromise: undefined,
+        inclusionProof: undefined,
+        canonicalizedBody: Buffer.alloc(0),
+    };
+}
+exports.TransparencyLogEntry = {
+    fromJSON(object) {
+        return {
+            logIndex: isSet(object.logIndex) ? String(object.logIndex) : "0",
+            logId: isSet(object.logId) ? sigstore_common_1.LogId.fromJSON(object.logId) : undefined,
+            kindVersion: isSet(object.kindVersion) ? exports.KindVersion.fromJSON(object.kindVersion) : undefined,
+            integratedTime: isSet(object.integratedTime) ? String(object.integratedTime) : "0",
+            inclusionPromise: isSet(object.inclusionPromise) ? exports.InclusionPromise.fromJSON(object.inclusionPromise) : undefined,
+            inclusionProof: isSet(object.inclusionProof) ? exports.InclusionProof.fromJSON(object.inclusionProof) : undefined,
+            canonicalizedBody: isSet(object.canonicalizedBody)
+                ? Buffer.from(bytesFromBase64(object.canonicalizedBody))
+                : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.logIndex !== undefined && (obj.logIndex = message.logIndex);
+        message.logId !== undefined && (obj.logId = message.logId ? sigstore_common_1.LogId.toJSON(message.logId) : undefined);
+        message.kindVersion !== undefined &&
+            (obj.kindVersion = message.kindVersion ? exports.KindVersion.toJSON(message.kindVersion) : undefined);
+        message.integratedTime !== undefined && (obj.integratedTime = message.integratedTime);
+        message.inclusionPromise !== undefined &&
+            (obj.inclusionPromise = message.inclusionPromise ? exports.InclusionPromise.toJSON(message.inclusionPromise) : undefined);
+        message.inclusionProof !== undefined &&
+            (obj.inclusionProof = message.inclusionProof ? exports.InclusionProof.toJSON(message.inclusionProof) : undefined);
+        message.canonicalizedBody !== undefined &&
+            (obj.canonicalizedBody = base64FromBytes(message.canonicalizedBody !== undefined ? message.canonicalizedBody : Buffer.alloc(0)));
+        return obj;
+    },
+};
+var globalThis = (() => {
+    if (typeof globalThis !== "undefined") {
+        return globalThis;
+    }
+    if (typeof self !== "undefined") {
+        return self;
+    }
+    if (typeof window !== "undefined") {
+        return window;
+    }
+    if (typeof global !== "undefined") {
+        return global;
+    }
+    throw "Unable to locate global object";
+})();
+function bytesFromBase64(b64) {
+    if (globalThis.Buffer) {
+        return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
+    }
+    else {
+        const bin = globalThis.atob(b64);
+        const arr = new Uint8Array(bin.length);
+        for (let i = 0; i < bin.length; ++i) {
+            arr[i] = bin.charCodeAt(i);
+        }
+        return arr;
+    }
+}
+function base64FromBytes(arr) {
+    if (globalThis.Buffer) {
+        return globalThis.Buffer.from(arr).toString("base64");
+    }
+    else {
+        const bin = [];
+        arr.forEach((byte) => {
+            bin.push(String.fromCharCode(byte));
+        });
+        return globalThis.btoa(bin.join(""));
+    }
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_trustroot.d.ts b/node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_trustroot.d.ts
new file mode 100644
index 0000000000000..152d08f5c6751
--- /dev/null
+++ b/node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_trustroot.d.ts
@@ -0,0 +1,89 @@
+import { DistinguishedName, HashAlgorithm, LogId, PublicKey, TimeRange, X509CertificateChain } from "./sigstore_common";
+/**
+ * TransparencyLogInstance describes the immutable parameters from a
+ * transparency log.
+ * See https://www.rfc-editor.org/rfc/rfc9162.html#name-log-parameters
+ * for more details.
+ * The included parameters are the minimal set required to identify a log,
+ * and verify an inclusion promise.
+ */
+export interface TransparencyLogInstance {
+    /** The base URL of the transparency log, which can be used by the client. */
+    baseUrl: string;
+    /** The hash algorithm used for the Merkle Tree. */
+    hashAlgorithm: HashAlgorithm;
+    /**
+     * The public key used to verify signatures generated by the log.
+     * This attribute contains the signature algorithm used by the log.
+     */
+    publicKey: PublicKey | undefined;
+    /** The unique identifier for this transparency log. */
+    logId: LogId | undefined;
+}
+/**
+ * CertificateAuthority enlists the information required to identify which
+ * CA to use and perform signature verification.
+ */
+export interface CertificateAuthority {
+    /**
+     * The root certificate MUST be self-signed, and so the subject and
+     * issuer are the same.
+     */
+    subject: DistinguishedName | undefined;
+    /** The URI at which the CA can be accessed. */
+    uri: string;
+    /** The certificate chain for this CA. */
+    certChain: X509CertificateChain | undefined;
+    /**
+     * The time the *entire* chain was valid. This is at max the
+     * longest interval when *all* certificates in the chain were valid,
+     * but it MAY be shorter.
+     */
+    validFor: TimeRange | undefined;
+}
+/**
+ * TrustedRoot describes the client's complete set of trusted entities.
+ * How the TrustedRoot is populated is not specified, but can be a
+ * combination of many sources such as TUF repositories, files on disk etc.
+ *
+ * The TrustedRoot is not meant to be used for any artifact verification, only
+ * to capture the complete/global set of trusted verification materials.
+ * When verifying an artifact, based on the artifact and policies, a selection
+ * of keys/authorities are expected to be extracted and provided to the
+ * verification function. This way the set of keys/authorities can be kept to
+ * a minimal set by the policy to gain better control over what signatures
+ * that are allowed.
+ */
+export interface TrustedRoot {
+    /** MUST be application/vnd.dev.sigstore.trustedroot+json;version=0.1 */
+    mediaType: string;
+    /** A set of trusted Rekor servers. */
+    tlogs: TransparencyLogInstance[];
+    /**
+     * A set of trusted certificate authorities (e.g. Fulcio), and any
+     * intermediate certificates they provide.
+     * If a CA is issuing multiple intermediate certificates, each
+     * combination shall be represented as separate chain. I.e, a single
+     * root cert may appear in multiple chains but with different
+     * intermediate and/or leaf certificates.
+     * The certificates are intended to be used for verifying artifact
+     * signatures.
+     */
+    certificateAuthorities: CertificateAuthority[];
+    /** A set of trusted certificate transparency logs. */
+    ctlogs: TransparencyLogInstance[];
+    /** A set of trusted timestamping authorities. */
+    timestampAuthorities: CertificateAuthority[];
+}
+export declare const TransparencyLogInstance: {
+    fromJSON(object: any): TransparencyLogInstance;
+    toJSON(message: TransparencyLogInstance): unknown;
+};
+export declare const CertificateAuthority: {
+    fromJSON(object: any): CertificateAuthority;
+    toJSON(message: CertificateAuthority): unknown;
+};
+export declare const TrustedRoot: {
+    fromJSON(object: any): TrustedRoot;
+    toJSON(message: TrustedRoot): unknown;
+};
diff --git a/node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_trustroot.js b/node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_trustroot.js
new file mode 100644
index 0000000000000..05e566767cdb2
--- /dev/null
+++ b/node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_trustroot.js
@@ -0,0 +1,103 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TrustedRoot = exports.CertificateAuthority = exports.TransparencyLogInstance = void 0;
+/* eslint-disable */
+const sigstore_common_1 = require("./sigstore_common");
+function createBaseTransparencyLogInstance() {
+    return { baseUrl: "", hashAlgorithm: 0, publicKey: undefined, logId: undefined };
+}
+exports.TransparencyLogInstance = {
+    fromJSON(object) {
+        return {
+            baseUrl: isSet(object.baseUrl) ? String(object.baseUrl) : "",
+            hashAlgorithm: isSet(object.hashAlgorithm) ? (0, sigstore_common_1.hashAlgorithmFromJSON)(object.hashAlgorithm) : 0,
+            publicKey: isSet(object.publicKey) ? sigstore_common_1.PublicKey.fromJSON(object.publicKey) : undefined,
+            logId: isSet(object.logId) ? sigstore_common_1.LogId.fromJSON(object.logId) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.baseUrl !== undefined && (obj.baseUrl = message.baseUrl);
+        message.hashAlgorithm !== undefined && (obj.hashAlgorithm = (0, sigstore_common_1.hashAlgorithmToJSON)(message.hashAlgorithm));
+        message.publicKey !== undefined &&
+            (obj.publicKey = message.publicKey ? sigstore_common_1.PublicKey.toJSON(message.publicKey) : undefined);
+        message.logId !== undefined && (obj.logId = message.logId ? sigstore_common_1.LogId.toJSON(message.logId) : undefined);
+        return obj;
+    },
+};
+function createBaseCertificateAuthority() {
+    return { subject: undefined, uri: "", certChain: undefined, validFor: undefined };
+}
+exports.CertificateAuthority = {
+    fromJSON(object) {
+        return {
+            subject: isSet(object.subject) ? sigstore_common_1.DistinguishedName.fromJSON(object.subject) : undefined,
+            uri: isSet(object.uri) ? String(object.uri) : "",
+            certChain: isSet(object.certChain) ? sigstore_common_1.X509CertificateChain.fromJSON(object.certChain) : undefined,
+            validFor: isSet(object.validFor) ? sigstore_common_1.TimeRange.fromJSON(object.validFor) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.subject !== undefined &&
+            (obj.subject = message.subject ? sigstore_common_1.DistinguishedName.toJSON(message.subject) : undefined);
+        message.uri !== undefined && (obj.uri = message.uri);
+        message.certChain !== undefined &&
+            (obj.certChain = message.certChain ? sigstore_common_1.X509CertificateChain.toJSON(message.certChain) : undefined);
+        message.validFor !== undefined &&
+            (obj.validFor = message.validFor ? sigstore_common_1.TimeRange.toJSON(message.validFor) : undefined);
+        return obj;
+    },
+};
+function createBaseTrustedRoot() {
+    return { mediaType: "", tlogs: [], certificateAuthorities: [], ctlogs: [], timestampAuthorities: [] };
+}
+exports.TrustedRoot = {
+    fromJSON(object) {
+        return {
+            mediaType: isSet(object.mediaType) ? String(object.mediaType) : "",
+            tlogs: Array.isArray(object?.tlogs) ? object.tlogs.map((e) => exports.TransparencyLogInstance.fromJSON(e)) : [],
+            certificateAuthorities: Array.isArray(object?.certificateAuthorities)
+                ? object.certificateAuthorities.map((e) => exports.CertificateAuthority.fromJSON(e))
+                : [],
+            ctlogs: Array.isArray(object?.ctlogs)
+                ? object.ctlogs.map((e) => exports.TransparencyLogInstance.fromJSON(e))
+                : [],
+            timestampAuthorities: Array.isArray(object?.timestampAuthorities)
+                ? object.timestampAuthorities.map((e) => exports.CertificateAuthority.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.mediaType !== undefined && (obj.mediaType = message.mediaType);
+        if (message.tlogs) {
+            obj.tlogs = message.tlogs.map((e) => e ? exports.TransparencyLogInstance.toJSON(e) : undefined);
+        }
+        else {
+            obj.tlogs = [];
+        }
+        if (message.certificateAuthorities) {
+            obj.certificateAuthorities = message.certificateAuthorities.map((e) => e ? exports.CertificateAuthority.toJSON(e) : undefined);
+        }
+        else {
+            obj.certificateAuthorities = [];
+        }
+        if (message.ctlogs) {
+            obj.ctlogs = message.ctlogs.map((e) => e ? exports.TransparencyLogInstance.toJSON(e) : undefined);
+        }
+        else {
+            obj.ctlogs = [];
+        }
+        if (message.timestampAuthorities) {
+            obj.timestampAuthorities = message.timestampAuthorities.map((e) => e ? exports.CertificateAuthority.toJSON(e) : undefined);
+        }
+        else {
+            obj.timestampAuthorities = [];
+        }
+        return obj;
+    },
+};
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_verification.d.ts b/node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_verification.d.ts
new file mode 100644
index 0000000000000..8ee32d8e66692
--- /dev/null
+++ b/node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_verification.d.ts
@@ -0,0 +1,156 @@
+/// <reference types="node" />
+import { Bundle } from "./sigstore_bundle";
+import { ObjectIdentifierValuePair, PublicKey, SubjectAlternativeName } from "./sigstore_common";
+import { TrustedRoot } from "./sigstore_trustroot";
+/** The identity of a X.509 Certificate signer. */
+export interface CertificateIdentity {
+    /** The X.509v3 issuer extension (OID 1.3.6.1.4.1.57264.1.1) */
+    issuer: string;
+    san: SubjectAlternativeName | undefined;
+    /**
+     * An unordered list of OIDs that must be verified.
+     * All OID/values provided in this list MUST exactly match against
+     * the values in the certificate for verification to be successful.
+     */
+    oids: ObjectIdentifierValuePair[];
+}
+export interface CertificateIdentities {
+    identities: CertificateIdentity[];
+}
+export interface PublicKeyIdentities {
+    publicKeys: PublicKey[];
+}
+/**
+ * A light-weight set of options/policies for identifying trusted signers,
+ * used during verification of a single artifact.
+ */
+export interface ArtifactVerificationOptions {
+    signers?: {
+        $case: "certificateIdentities";
+        certificateIdentities: CertificateIdentities;
+    } | {
+        $case: "publicKeys";
+        publicKeys: PublicKeyIdentities;
+    };
+    /**
+     * Optional options for artifact transparency log verification.
+     * If none is provided, the default verification options are:
+     * Threshold: 1
+     * Online verification: false
+     * Disable: false
+     */
+    tlogOptions?: ArtifactVerificationOptions_TlogOptions | undefined;
+    /**
+     * Optional options for certificate transparency log verification.
+     * If none is provided, the default verification options are:
+     * Threshold: 1
+     * Detached SCT: false
+     * Disable: false
+     */
+    ctlogOptions?: ArtifactVerificationOptions_CtlogOptions | undefined;
+    /**
+     * Optional options for certificate signed timestamp verification.
+     * If none is provided, the default verification options are:
+     * Threshold: 1
+     * Disable: false
+     */
+    tsaOptions?: ArtifactVerificationOptions_TimestampAuthorityOptions | undefined;
+}
+export interface ArtifactVerificationOptions_TlogOptions {
+    /** Number of transparency logs the entry must appear on. */
+    threshold: number;
+    /** Perform an online inclusion proof. */
+    performOnlineVerification: boolean;
+    /** Disable verification for transparency logs. */
+    disable: boolean;
+}
+export interface ArtifactVerificationOptions_CtlogOptions {
+    /**
+     * The number of ct transparency logs the certificate must
+     * appear on.
+     */
+    threshold: number;
+    /**
+     * Expect detached SCTs.
+     * This is not supported right now as we can't capture a
+     * detached SCT in the bundle.
+     */
+    detachedSct: boolean;
+    /** Disable ct transparency log verification */
+    disable: boolean;
+}
+export interface ArtifactVerificationOptions_TimestampAuthorityOptions {
+    /** The number of signed timestamps that are expected. */
+    threshold: number;
+    /** Disable signed timestamp verification. */
+    disable: boolean;
+}
+export interface Artifact {
+    data?: {
+        $case: "artifactUri";
+        artifactUri: string;
+    } | {
+        $case: "artifact";
+        artifact: Buffer;
+    };
+}
+/**
+ * Input captures all that is needed to call the bundle verification method,
+ * to verify a single artifact referenced by the bundle.
+ */
+export interface Input {
+    /**
+     * The verification materials provided during a bundle verification.
+     * The running process is usually preloaded with a "global"
+     * dev.sigstore.trustroot.TrustedRoot.v1 instance. Prior to
+     * verifying an artifact (i.e a bundle), and/or based on current
+     * policy, some selection is expected to happen, to filter out the
+     * exact certificate authority to use, which transparency logs are
+     * relevant etc. The result should be captured in the
+     * `artifact_trust_root`.
+     */
+    artifactTrustRoot: TrustedRoot | undefined;
+    artifactVerificationOptions: ArtifactVerificationOptions | undefined;
+    bundle: Bundle | undefined;
+    /**
+     * If the bundle contains a message signature, the artifact must be
+     * provided.
+     */
+    artifact?: Artifact | undefined;
+}
+export declare const CertificateIdentity: {
+    fromJSON(object: any): CertificateIdentity;
+    toJSON(message: CertificateIdentity): unknown;
+};
+export declare const CertificateIdentities: {
+    fromJSON(object: any): CertificateIdentities;
+    toJSON(message: CertificateIdentities): unknown;
+};
+export declare const PublicKeyIdentities: {
+    fromJSON(object: any): PublicKeyIdentities;
+    toJSON(message: PublicKeyIdentities): unknown;
+};
+export declare const ArtifactVerificationOptions: {
+    fromJSON(object: any): ArtifactVerificationOptions;
+    toJSON(message: ArtifactVerificationOptions): unknown;
+};
+export declare const ArtifactVerificationOptions_TlogOptions: {
+    fromJSON(object: any): ArtifactVerificationOptions_TlogOptions;
+    toJSON(message: ArtifactVerificationOptions_TlogOptions): unknown;
+};
+export declare const ArtifactVerificationOptions_CtlogOptions: {
+    fromJSON(object: any): ArtifactVerificationOptions_CtlogOptions;
+    toJSON(message: ArtifactVerificationOptions_CtlogOptions): unknown;
+};
+export declare const ArtifactVerificationOptions_TimestampAuthorityOptions: {
+    fromJSON(object: any): ArtifactVerificationOptions_TimestampAuthorityOptions;
+    toJSON(message: ArtifactVerificationOptions_TimestampAuthorityOptions): unknown;
+};
+export declare const Artifact: {
+    fromJSON(object: any): Artifact;
+    toJSON(message: Artifact): unknown;
+};
+export declare const Input: {
+    fromJSON(object: any): Input;
+    toJSON(message: Input): unknown;
+};
diff --git a/node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_verification.js b/node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_verification.js
new file mode 100644
index 0000000000000..b99a305ba5317
--- /dev/null
+++ b/node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_verification.js
@@ -0,0 +1,273 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Input = exports.Artifact = exports.ArtifactVerificationOptions_TimestampAuthorityOptions = exports.ArtifactVerificationOptions_CtlogOptions = exports.ArtifactVerificationOptions_TlogOptions = exports.ArtifactVerificationOptions = exports.PublicKeyIdentities = exports.CertificateIdentities = exports.CertificateIdentity = void 0;
+/* eslint-disable */
+const sigstore_bundle_1 = require("./sigstore_bundle");
+const sigstore_common_1 = require("./sigstore_common");
+const sigstore_trustroot_1 = require("./sigstore_trustroot");
+function createBaseCertificateIdentity() {
+    return { issuer: "", san: undefined, oids: [] };
+}
+exports.CertificateIdentity = {
+    fromJSON(object) {
+        return {
+            issuer: isSet(object.issuer) ? String(object.issuer) : "",
+            san: isSet(object.san) ? sigstore_common_1.SubjectAlternativeName.fromJSON(object.san) : undefined,
+            oids: Array.isArray(object?.oids) ? object.oids.map((e) => sigstore_common_1.ObjectIdentifierValuePair.fromJSON(e)) : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.issuer !== undefined && (obj.issuer = message.issuer);
+        message.san !== undefined && (obj.san = message.san ? sigstore_common_1.SubjectAlternativeName.toJSON(message.san) : undefined);
+        if (message.oids) {
+            obj.oids = message.oids.map((e) => e ? sigstore_common_1.ObjectIdentifierValuePair.toJSON(e) : undefined);
+        }
+        else {
+            obj.oids = [];
+        }
+        return obj;
+    },
+};
+function createBaseCertificateIdentities() {
+    return { identities: [] };
+}
+exports.CertificateIdentities = {
+    fromJSON(object) {
+        return {
+            identities: Array.isArray(object?.identities)
+                ? object.identities.map((e) => exports.CertificateIdentity.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.identities) {
+            obj.identities = message.identities.map((e) => e ? exports.CertificateIdentity.toJSON(e) : undefined);
+        }
+        else {
+            obj.identities = [];
+        }
+        return obj;
+    },
+};
+function createBasePublicKeyIdentities() {
+    return { publicKeys: [] };
+}
+exports.PublicKeyIdentities = {
+    fromJSON(object) {
+        return {
+            publicKeys: Array.isArray(object?.publicKeys) ? object.publicKeys.map((e) => sigstore_common_1.PublicKey.fromJSON(e)) : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.publicKeys) {
+            obj.publicKeys = message.publicKeys.map((e) => e ? sigstore_common_1.PublicKey.toJSON(e) : undefined);
+        }
+        else {
+            obj.publicKeys = [];
+        }
+        return obj;
+    },
+};
+function createBaseArtifactVerificationOptions() {
+    return { signers: undefined, tlogOptions: undefined, ctlogOptions: undefined, tsaOptions: undefined };
+}
+exports.ArtifactVerificationOptions = {
+    fromJSON(object) {
+        return {
+            signers: isSet(object.certificateIdentities)
+                ? {
+                    $case: "certificateIdentities",
+                    certificateIdentities: exports.CertificateIdentities.fromJSON(object.certificateIdentities),
+                }
+                : isSet(object.publicKeys)
+                    ? { $case: "publicKeys", publicKeys: exports.PublicKeyIdentities.fromJSON(object.publicKeys) }
+                    : undefined,
+            tlogOptions: isSet(object.tlogOptions)
+                ? exports.ArtifactVerificationOptions_TlogOptions.fromJSON(object.tlogOptions)
+                : undefined,
+            ctlogOptions: isSet(object.ctlogOptions)
+                ? exports.ArtifactVerificationOptions_CtlogOptions.fromJSON(object.ctlogOptions)
+                : undefined,
+            tsaOptions: isSet(object.tsaOptions)
+                ? exports.ArtifactVerificationOptions_TimestampAuthorityOptions.fromJSON(object.tsaOptions)
+                : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.signers?.$case === "certificateIdentities" &&
+            (obj.certificateIdentities = message.signers?.certificateIdentities
+                ? exports.CertificateIdentities.toJSON(message.signers?.certificateIdentities)
+                : undefined);
+        message.signers?.$case === "publicKeys" && (obj.publicKeys = message.signers?.publicKeys
+            ? exports.PublicKeyIdentities.toJSON(message.signers?.publicKeys)
+            : undefined);
+        message.tlogOptions !== undefined && (obj.tlogOptions = message.tlogOptions
+            ? exports.ArtifactVerificationOptions_TlogOptions.toJSON(message.tlogOptions)
+            : undefined);
+        message.ctlogOptions !== undefined && (obj.ctlogOptions = message.ctlogOptions
+            ? exports.ArtifactVerificationOptions_CtlogOptions.toJSON(message.ctlogOptions)
+            : undefined);
+        message.tsaOptions !== undefined && (obj.tsaOptions = message.tsaOptions
+            ? exports.ArtifactVerificationOptions_TimestampAuthorityOptions.toJSON(message.tsaOptions)
+            : undefined);
+        return obj;
+    },
+};
+function createBaseArtifactVerificationOptions_TlogOptions() {
+    return { threshold: 0, performOnlineVerification: false, disable: false };
+}
+exports.ArtifactVerificationOptions_TlogOptions = {
+    fromJSON(object) {
+        return {
+            threshold: isSet(object.threshold) ? Number(object.threshold) : 0,
+            performOnlineVerification: isSet(object.performOnlineVerification)
+                ? Boolean(object.performOnlineVerification)
+                : false,
+            disable: isSet(object.disable) ? Boolean(object.disable) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.threshold !== undefined && (obj.threshold = Math.round(message.threshold));
+        message.performOnlineVerification !== undefined &&
+            (obj.performOnlineVerification = message.performOnlineVerification);
+        message.disable !== undefined && (obj.disable = message.disable);
+        return obj;
+    },
+};
+function createBaseArtifactVerificationOptions_CtlogOptions() {
+    return { threshold: 0, detachedSct: false, disable: false };
+}
+exports.ArtifactVerificationOptions_CtlogOptions = {
+    fromJSON(object) {
+        return {
+            threshold: isSet(object.threshold) ? Number(object.threshold) : 0,
+            detachedSct: isSet(object.detachedSct) ? Boolean(object.detachedSct) : false,
+            disable: isSet(object.disable) ? Boolean(object.disable) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.threshold !== undefined && (obj.threshold = Math.round(message.threshold));
+        message.detachedSct !== undefined && (obj.detachedSct = message.detachedSct);
+        message.disable !== undefined && (obj.disable = message.disable);
+        return obj;
+    },
+};
+function createBaseArtifactVerificationOptions_TimestampAuthorityOptions() {
+    return { threshold: 0, disable: false };
+}
+exports.ArtifactVerificationOptions_TimestampAuthorityOptions = {
+    fromJSON(object) {
+        return {
+            threshold: isSet(object.threshold) ? Number(object.threshold) : 0,
+            disable: isSet(object.disable) ? Boolean(object.disable) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.threshold !== undefined && (obj.threshold = Math.round(message.threshold));
+        message.disable !== undefined && (obj.disable = message.disable);
+        return obj;
+    },
+};
+function createBaseArtifact() {
+    return { data: undefined };
+}
+exports.Artifact = {
+    fromJSON(object) {
+        return {
+            data: isSet(object.artifactUri)
+                ? { $case: "artifactUri", artifactUri: String(object.artifactUri) }
+                : isSet(object.artifact)
+                    ? { $case: "artifact", artifact: Buffer.from(bytesFromBase64(object.artifact)) }
+                    : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.data?.$case === "artifactUri" && (obj.artifactUri = message.data?.artifactUri);
+        message.data?.$case === "artifact" &&
+            (obj.artifact = message.data?.artifact !== undefined ? base64FromBytes(message.data?.artifact) : undefined);
+        return obj;
+    },
+};
+function createBaseInput() {
+    return {
+        artifactTrustRoot: undefined,
+        artifactVerificationOptions: undefined,
+        bundle: undefined,
+        artifact: undefined,
+    };
+}
+exports.Input = {
+    fromJSON(object) {
+        return {
+            artifactTrustRoot: isSet(object.artifactTrustRoot) ? sigstore_trustroot_1.TrustedRoot.fromJSON(object.artifactTrustRoot) : undefined,
+            artifactVerificationOptions: isSet(object.artifactVerificationOptions)
+                ? exports.ArtifactVerificationOptions.fromJSON(object.artifactVerificationOptions)
+                : undefined,
+            bundle: isSet(object.bundle) ? sigstore_bundle_1.Bundle.fromJSON(object.bundle) : undefined,
+            artifact: isSet(object.artifact) ? exports.Artifact.fromJSON(object.artifact) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.artifactTrustRoot !== undefined &&
+            (obj.artifactTrustRoot = message.artifactTrustRoot ? sigstore_trustroot_1.TrustedRoot.toJSON(message.artifactTrustRoot) : undefined);
+        message.artifactVerificationOptions !== undefined &&
+            (obj.artifactVerificationOptions = message.artifactVerificationOptions
+                ? exports.ArtifactVerificationOptions.toJSON(message.artifactVerificationOptions)
+                : undefined);
+        message.bundle !== undefined && (obj.bundle = message.bundle ? sigstore_bundle_1.Bundle.toJSON(message.bundle) : undefined);
+        message.artifact !== undefined && (obj.artifact = message.artifact ? exports.Artifact.toJSON(message.artifact) : undefined);
+        return obj;
+    },
+};
+var globalThis = (() => {
+    if (typeof globalThis !== "undefined") {
+        return globalThis;
+    }
+    if (typeof self !== "undefined") {
+        return self;
+    }
+    if (typeof window !== "undefined") {
+        return window;
+    }
+    if (typeof global !== "undefined") {
+        return global;
+    }
+    throw "Unable to locate global object";
+})();
+function bytesFromBase64(b64) {
+    if (globalThis.Buffer) {
+        return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
+    }
+    else {
+        const bin = globalThis.atob(b64);
+        const arr = new Uint8Array(bin.length);
+        for (let i = 0; i < bin.length; ++i) {
+            arr[i] = bin.charCodeAt(i);
+        }
+        return arr;
+    }
+}
+function base64FromBytes(arr) {
+    if (globalThis.Buffer) {
+        return globalThis.Buffer.from(arr).toString("base64");
+    }
+    else {
+        const bin = [];
+        arr.forEach((byte) => {
+            bin.push(String.fromCharCode(byte));
+        });
+        return globalThis.btoa(bin.join(""));
+    }
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/sigstore/dist/types/sigstore/index.d.ts b/node_modules/sigstore/dist/types/sigstore/index.d.ts
new file mode 100644
index 0000000000000..26dd2150d548e
--- /dev/null
+++ b/node_modules/sigstore/dist/types/sigstore/index.d.ts
@@ -0,0 +1,46 @@
+/// <reference types="node" />
+import { Entry } from '../../tlog';
+import { x509Certificate } from '../../x509/cert';
+import { SignatureMaterial } from '../signature';
+import { WithRequired } from '../utility';
+import { ValidBundle } from './validate';
+import { Envelope } from './__generated__/envelope';
+import { Bundle, VerificationMaterial } from './__generated__/sigstore_bundle';
+import { TransparencyLogEntry } from './__generated__/sigstore_rekor';
+import { ArtifactVerificationOptions } from './__generated__/sigstore_verification';
+export * from './serialized';
+export * from './validate';
+export * from './__generated__/envelope';
+export * from './__generated__/sigstore_bundle';
+export * from './__generated__/sigstore_common';
+export { TransparencyLogEntry } from './__generated__/sigstore_rekor';
+export * from './__generated__/sigstore_trustroot';
+export * from './__generated__/sigstore_verification';
+export declare const bundleToJSON: (message: Bundle) => unknown;
+export declare const bundleFromJSON: (obj: any) => ValidBundle;
+export declare const envelopeToJSON: (message: Envelope) => unknown;
+export declare const envelopeFromJSON: (object: any) => Envelope;
+export type BundleWithVerificationMaterial = WithRequired<Bundle, 'verificationMaterial'>;
+export declare function isBundleWithVerificationMaterial(bundle: Bundle): bundle is BundleWithVerificationMaterial;
+export type BundleWithCertificateChain = Bundle & {
+    verificationMaterial: VerificationMaterial & {
+        content: Extract<VerificationMaterial['content'], {
+            $case: 'x509CertificateChain';
+        }>;
+    };
+};
+export declare function isBundleWithCertificateChain(bundle: Bundle): bundle is BundleWithCertificateChain;
+export type RequiredArtifactVerificationOptions = WithRequired<ArtifactVerificationOptions, 'ctlogOptions' | 'tlogOptions'>;
+export type CAArtifactVerificationOptions = WithRequired<ArtifactVerificationOptions, 'ctlogOptions'> & {
+    signers?: Extract<ArtifactVerificationOptions['signers'], {
+        $case: 'certificateIdentities';
+    }>;
+};
+export declare function isCAVerificationOptions(options: ArtifactVerificationOptions): options is CAArtifactVerificationOptions;
+export type VerifiableTransparencyLogEntry = WithRequired<TransparencyLogEntry, 'logId' | 'inclusionPromise' | 'kindVersion'>;
+export declare function isVerifiableTransparencyLogEntry(entry: TransparencyLogEntry): entry is VerifiableTransparencyLogEntry;
+export declare const bundle: {
+    toDSSEBundle: (envelope: Envelope, signature: SignatureMaterial, rekorEntry: Entry) => Bundle;
+    toMessageSignatureBundle: (digest: Buffer, signature: SignatureMaterial, rekorEntry: Entry) => Bundle;
+};
+export declare function signingCertificate(bundle: Bundle): x509Certificate | undefined;
diff --git a/node_modules/sigstore/dist/types/sigstore/index.js b/node_modules/sigstore/dist/types/sigstore/index.js
new file mode 100644
index 0000000000000..df07d6dc9fc29
--- /dev/null
+++ b/node_modules/sigstore/dist/types/sigstore/index.js
@@ -0,0 +1,144 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __exportStar = (this && this.__exportStar) || function(m, exports) {
+    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.signingCertificate = exports.bundle = exports.isVerifiableTransparencyLogEntry = exports.isCAVerificationOptions = exports.isBundleWithCertificateChain = exports.isBundleWithVerificationMaterial = exports.envelopeFromJSON = exports.envelopeToJSON = exports.bundleFromJSON = exports.bundleToJSON = exports.TransparencyLogEntry = void 0;
+const util_1 = require("../../util");
+const cert_1 = require("../../x509/cert");
+const validate_1 = require("./validate");
+const envelope_1 = require("./__generated__/envelope");
+const sigstore_bundle_1 = require("./__generated__/sigstore_bundle");
+const sigstore_common_1 = require("./__generated__/sigstore_common");
+__exportStar(require("./serialized"), exports);
+__exportStar(require("./validate"), exports);
+__exportStar(require("./__generated__/envelope"), exports);
+__exportStar(require("./__generated__/sigstore_bundle"), exports);
+__exportStar(require("./__generated__/sigstore_common"), exports);
+var sigstore_rekor_1 = require("./__generated__/sigstore_rekor");
+Object.defineProperty(exports, "TransparencyLogEntry", { enumerable: true, get: function () { return sigstore_rekor_1.TransparencyLogEntry; } });
+__exportStar(require("./__generated__/sigstore_trustroot"), exports);
+__exportStar(require("./__generated__/sigstore_verification"), exports);
+exports.bundleToJSON = sigstore_bundle_1.Bundle.toJSON;
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+const bundleFromJSON = (obj) => {
+    const bundle = sigstore_bundle_1.Bundle.fromJSON(obj);
+    (0, validate_1.assertValidBundle)(bundle);
+    return bundle;
+};
+exports.bundleFromJSON = bundleFromJSON;
+exports.envelopeToJSON = envelope_1.Envelope.toJSON;
+exports.envelopeFromJSON = envelope_1.Envelope.fromJSON;
+const BUNDLE_MEDIA_TYPE = 'application/vnd.dev.sigstore.bundle+json;version=0.1';
+// Type guard for narrowing a Bundle to a BundleWithVerificationMaterial
+function isBundleWithVerificationMaterial(bundle) {
+    return bundle.verificationMaterial !== undefined;
+}
+exports.isBundleWithVerificationMaterial = isBundleWithVerificationMaterial;
+// Type guard for narrowing a Bundle to a BundleWithCertificateChain
+function isBundleWithCertificateChain(bundle) {
+    return (isBundleWithVerificationMaterial(bundle) &&
+        bundle.verificationMaterial.content !== undefined &&
+        bundle.verificationMaterial.content.$case === 'x509CertificateChain');
+}
+exports.isBundleWithCertificateChain = isBundleWithCertificateChain;
+function isCAVerificationOptions(options) {
+    return (options.ctlogOptions !== undefined &&
+        (options.signers === undefined ||
+            options.signers.$case === 'certificateIdentities'));
+}
+exports.isCAVerificationOptions = isCAVerificationOptions;
+function isVerifiableTransparencyLogEntry(entry) {
+    return (entry.logId !== undefined &&
+        entry.inclusionPromise !== undefined &&
+        entry.kindVersion !== undefined);
+}
+exports.isVerifiableTransparencyLogEntry = isVerifiableTransparencyLogEntry;
+exports.bundle = {
+    toDSSEBundle: (envelope, signature, rekorEntry) => ({
+        mediaType: BUNDLE_MEDIA_TYPE,
+        content: {
+            $case: 'dsseEnvelope',
+            dsseEnvelope: envelope,
+        },
+        verificationMaterial: toVerificationMaterial(signature, rekorEntry),
+    }),
+    toMessageSignatureBundle: (digest, signature, rekorEntry) => ({
+        mediaType: BUNDLE_MEDIA_TYPE,
+        content: {
+            $case: 'messageSignature',
+            messageSignature: {
+                messageDigest: {
+                    algorithm: sigstore_common_1.HashAlgorithm.SHA2_256,
+                    digest: digest,
+                },
+                signature: signature.signature,
+            },
+        },
+        verificationMaterial: toVerificationMaterial(signature, rekorEntry),
+    }),
+};
+function toTransparencyLogEntry(entry) {
+    const set = Buffer.from(entry.verification.signedEntryTimestamp, 'base64');
+    const logID = Buffer.from(entry.logID, 'hex');
+    // Parse entry body so we can extract the kind and version.
+    const bodyJSON = util_1.encoding.base64Decode(entry.body);
+    const entryBody = JSON.parse(bodyJSON);
+    return {
+        inclusionPromise: {
+            signedEntryTimestamp: set,
+        },
+        logIndex: entry.logIndex.toString(),
+        logId: {
+            keyId: logID,
+        },
+        integratedTime: entry.integratedTime.toString(),
+        kindVersion: {
+            kind: entryBody.kind,
+            version: entryBody.apiVersion,
+        },
+        inclusionProof: undefined,
+        canonicalizedBody: Buffer.from(entry.body, 'base64'),
+    };
+}
+function toVerificationMaterial(signature, entry) {
+    return {
+        content: signature.certificates
+            ? toVerificationMaterialx509CertificateChain(signature.certificates)
+            : toVerificationMaterialPublicKey(signature.key.id || ''),
+        tlogEntries: [toTransparencyLogEntry(entry)],
+        timestampVerificationData: undefined,
+    };
+}
+function toVerificationMaterialx509CertificateChain(certificates) {
+    return {
+        $case: 'x509CertificateChain',
+        x509CertificateChain: {
+            certificates: certificates.map((c) => ({
+                rawBytes: util_1.pem.toDER(c),
+            })),
+        },
+    };
+}
+function toVerificationMaterialPublicKey(hint) {
+    return { $case: 'publicKey', publicKey: { hint } };
+}
+function signingCertificate(bundle) {
+    if (!isBundleWithCertificateChain(bundle)) {
+        return undefined;
+    }
+    const signingCert = bundle.verificationMaterial.content.x509CertificateChain.certificates[0];
+    return cert_1.x509Certificate.parse(signingCert.rawBytes);
+}
+exports.signingCertificate = signingCertificate;
diff --git a/node_modules/sigstore/dist/types/sigstore/serialized.d.ts b/node_modules/sigstore/dist/types/sigstore/serialized.d.ts
new file mode 100644
index 0000000000000..31cb2ce03fd30
--- /dev/null
+++ b/node_modules/sigstore/dist/types/sigstore/serialized.d.ts
@@ -0,0 +1,74 @@
+import { OneOf } from '../utility';
+type SerializedTLogEntry = {
+    logIndex: string;
+    logId: {
+        keyId: string;
+    };
+    kindVersion: {
+        kind: string;
+        version: string;
+    } | undefined;
+    integratedTime: string;
+    inclusionPromise: {
+        signedEntryTimestamp: string;
+    };
+    inclusionProof: {
+        logIndex: string;
+        rootHash: string;
+        treeSize: string;
+        hashes: string[];
+        checkpoint: {
+            envelope: string;
+        };
+    } | undefined;
+    canonicalizedBody: string;
+};
+type SerializedTimestampVerificationData = {
+    rfc3161Timestamps: {
+        signedTimestamp: string;
+    }[];
+};
+type SerializedMessageSignature = {
+    messageDigest: {
+        algorithm: string;
+        digest: string;
+    } | undefined;
+    signature: string;
+};
+type SerializedDSSEEnvelope = {
+    payload: string;
+    payloadType: string;
+    signatures: {
+        sig: string;
+        keyid: string;
+    }[];
+};
+export type SerializedBundle = {
+    mediaType: string;
+    verificationMaterial: (OneOf<{
+        x509CertificateChain: {
+            certificates: {
+                rawBytes: string;
+            }[];
+        };
+        publicKey: {
+            hint: string;
+        };
+    }> | undefined) & {
+        tlogEntries: SerializedTLogEntry[];
+        timestampVerificationData: SerializedTimestampVerificationData | undefined;
+    };
+} & OneOf<{
+    dsseEnvelope: SerializedDSSEEnvelope;
+    messageSignature: SerializedMessageSignature;
+}>;
+interface SerializedSignature {
+    sig: string;
+    keyid: string;
+}
+export type SerializedEnvelope = {
+    payload: string;
+    payloadType: string;
+    signatures: SerializedSignature[];
+};
+export {};
diff --git a/node_modules/sigstore/dist/types/sigstore/serialized.js b/node_modules/sigstore/dist/types/sigstore/serialized.js
new file mode 100644
index 0000000000000..c8ad2e549bdc6
--- /dev/null
+++ b/node_modules/sigstore/dist/types/sigstore/serialized.js
@@ -0,0 +1,2 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/node_modules/sigstore/dist/types/sigstore/validate.d.ts b/node_modules/sigstore/dist/types/sigstore/validate.d.ts
new file mode 100644
index 0000000000000..fd0a354282426
--- /dev/null
+++ b/node_modules/sigstore/dist/types/sigstore/validate.d.ts
@@ -0,0 +1,16 @@
+import { WithRequired } from '../utility';
+import { Bundle, VerificationMaterial } from './__generated__/sigstore_bundle';
+import { MessageSignature } from './__generated__/sigstore_common';
+export type ValidBundle = Bundle & {
+    verificationMaterial: VerificationMaterial & {
+        content: NonNullable<VerificationMaterial['content']>;
+    };
+    content: (Extract<Bundle['content'], {
+        $case: 'messageSignature';
+    }> & {
+        messageSignature: WithRequired<MessageSignature, 'messageDigest'>;
+    }) | Extract<Bundle['content'], {
+        $case: 'dsseEnvelope';
+    }>;
+};
+export declare function assertValidBundle(b: Bundle): asserts b is ValidBundle;
diff --git a/node_modules/sigstore/dist/types/sigstore/validate.js b/node_modules/sigstore/dist/types/sigstore/validate.js
new file mode 100644
index 0000000000000..a19d8ad3ec702
--- /dev/null
+++ b/node_modules/sigstore/dist/types/sigstore/validate.js
@@ -0,0 +1,88 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.assertValidBundle = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const error_1 = require("../../error");
+// Performs basic validation of a Sigstore bundle to ensure that all required
+// fields are populated. This is not a complete validation of the bundle, but
+// rather a check that the bundle is in a valid state to be processed by the
+// rest of the code.
+function assertValidBundle(b) {
+    const invalidValues = [];
+    // Content-related validation
+    if (b.content === undefined) {
+        invalidValues.push('content');
+    }
+    else {
+        switch (b.content.$case) {
+            case 'messageSignature':
+                if (b.content.messageSignature.messageDigest === undefined) {
+                    invalidValues.push('content.messageSignature.messageDigest');
+                }
+                else {
+                    if (b.content.messageSignature.messageDigest.digest.length === 0) {
+                        invalidValues.push('content.messageSignature.messageDigest.digest');
+                    }
+                }
+                if (b.content.messageSignature.signature.length === 0) {
+                    invalidValues.push('content.messageSignature.signature');
+                }
+                break;
+            case 'dsseEnvelope':
+                if (b.content.dsseEnvelope.payload.length === 0) {
+                    invalidValues.push('content.dsseEnvelope.payload');
+                }
+                if (b.content.dsseEnvelope.signatures.length !== 1) {
+                    invalidValues.push('content.dsseEnvelope.signatures');
+                }
+                else {
+                    if (b.content.dsseEnvelope.signatures[0].sig.length === 0) {
+                        invalidValues.push('content.dsseEnvelope.signatures[0].sig');
+                    }
+                }
+                break;
+        }
+    }
+    // Verification material-related validation
+    if (b.verificationMaterial === undefined) {
+        invalidValues.push('verificationMaterial');
+    }
+    else {
+        if (b.verificationMaterial.content === undefined) {
+            invalidValues.push('verificationMaterial.content');
+        }
+        else {
+            switch (b.verificationMaterial.content.$case) {
+                case 'x509CertificateChain':
+                    if (b.verificationMaterial.content.x509CertificateChain.certificates
+                        .length === 0) {
+                        invalidValues.push('verificationMaterial.content.x509CertificateChain.certificates');
+                    }
+                    b.verificationMaterial.content.x509CertificateChain.certificates.forEach((cert, i) => {
+                        if (cert.rawBytes.length === 0) {
+                            invalidValues.push(`verificationMaterial.content.x509CertificateChain.certificates[${i}].rawBytes`);
+                        }
+                    });
+                    break;
+            }
+        }
+    }
+    if (invalidValues.length > 0) {
+        throw new error_1.ValidationError(`invalid/missing bundle values: ${invalidValues.join(', ')}`);
+    }
+}
+exports.assertValidBundle = assertValidBundle;
diff --git a/node_modules/sigstore/dist/types/utility.d.ts b/node_modules/sigstore/dist/types/utility.d.ts
new file mode 100644
index 0000000000000..df993d503f8ea
--- /dev/null
+++ b/node_modules/sigstore/dist/types/utility.d.ts
@@ -0,0 +1,14 @@
+type ValueOf<Obj> = Obj[keyof Obj];
+type OneOnly<Obj, K extends keyof Obj> = {
+    [key in Exclude<keyof Obj, K>]: undefined;
+} & {
+    [key in K]: Obj[K];
+};
+type OneOfByKey<Obj> = {
+    [key in keyof Obj]: OneOnly<Obj, key>;
+};
+export type OneOf<T> = ValueOf<OneOfByKey<T>>;
+export type WithRequired<T, K extends keyof T> = T & {
+    [P in K]-?: NonNullable<T[P]>;
+};
+export {};
diff --git a/node_modules/sigstore/dist/types/utility.js b/node_modules/sigstore/dist/types/utility.js
new file mode 100644
index 0000000000000..132848cd7587e
--- /dev/null
+++ b/node_modules/sigstore/dist/types/utility.js
@@ -0,0 +1,18 @@
+"use strict";
+/*
+Copyright 2022 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+// https://dev.to/maxime1992/implement-a-generic-oneof-type-with-typescript-22em
+Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/node_modules/sigstore/dist/util/crypto.d.ts b/node_modules/sigstore/dist/util/crypto.d.ts
new file mode 100644
index 0000000000000..a726dd260750c
--- /dev/null
+++ b/node_modules/sigstore/dist/util/crypto.d.ts
@@ -0,0 +1,10 @@
+/// <reference types="node" />
+/// <reference types="node" />
+/// <reference types="node" />
+import { BinaryLike, KeyLike, KeyPairKeyObjectResult } from 'crypto';
+export declare function generateKeyPair(): KeyPairKeyObjectResult;
+export declare function createPublicKey(key: string | Buffer): KeyLike;
+export declare function signBlob(data: NodeJS.ArrayBufferView, privateKey: KeyLike): Buffer;
+export declare function verifyBlob(data: Buffer, key: KeyLike, signature: Buffer, algorithm?: string): boolean;
+export declare function hash(data: BinaryLike): Buffer;
+export declare function randomBytes(count: number): Buffer;
diff --git a/node_modules/sigstore/dist/util/crypto.js b/node_modules/sigstore/dist/util/crypto.js
new file mode 100644
index 0000000000000..0b1e0bc62d8ab
--- /dev/null
+++ b/node_modules/sigstore/dist/util/crypto.js
@@ -0,0 +1,64 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.randomBytes = exports.hash = exports.verifyBlob = exports.signBlob = exports.createPublicKey = exports.generateKeyPair = void 0;
+/*
+Copyright 2022 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const crypto_1 = __importDefault(require("crypto"));
+const EC_KEYPAIR_TYPE = 'ec';
+const P256_CURVE = 'P-256';
+const SHA256_ALGORITHM = 'sha256';
+function generateKeyPair() {
+    return crypto_1.default.generateKeyPairSync(EC_KEYPAIR_TYPE, {
+        namedCurve: P256_CURVE,
+    });
+}
+exports.generateKeyPair = generateKeyPair;
+function createPublicKey(key) {
+    if (typeof key === 'string') {
+        return crypto_1.default.createPublicKey(key);
+    }
+    else {
+        return crypto_1.default.createPublicKey({ key, format: 'der', type: 'spki' });
+    }
+}
+exports.createPublicKey = createPublicKey;
+function signBlob(data, privateKey) {
+    return crypto_1.default.sign(null, data, privateKey);
+}
+exports.signBlob = signBlob;
+function verifyBlob(data, key, signature, algorithm) {
+    // The try/catch is to work around an issue in Node 14.x where verify throws
+    // an error in some scenarios if the signature is invalid.
+    try {
+        return crypto_1.default.verify(algorithm, data, key, signature);
+    }
+    catch (e) {
+        return false;
+    }
+}
+exports.verifyBlob = verifyBlob;
+function hash(data) {
+    const hash = crypto_1.default.createHash(SHA256_ALGORITHM);
+    return hash.update(data).digest();
+}
+exports.hash = hash;
+function randomBytes(count) {
+    return crypto_1.default.randomBytes(count);
+}
+exports.randomBytes = randomBytes;
diff --git a/node_modules/sigstore/dist/util/dsse.d.ts b/node_modules/sigstore/dist/util/dsse.d.ts
new file mode 100644
index 0000000000000..839b9c03ce38c
--- /dev/null
+++ b/node_modules/sigstore/dist/util/dsse.d.ts
@@ -0,0 +1,2 @@
+/// <reference types="node" />
+export declare function preAuthEncoding(payloadType: string, payload: Buffer): Buffer;
diff --git a/node_modules/sigstore/dist/util/dsse.js b/node_modules/sigstore/dist/util/dsse.js
new file mode 100644
index 0000000000000..bba7baa6bbfb8
--- /dev/null
+++ b/node_modules/sigstore/dist/util/dsse.js
@@ -0,0 +1,25 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.preAuthEncoding = void 0;
+/*
+Copyright 2022 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const PAE_PREFIX = 'DSSEv1';
+// DSSE Pre-Authentication Encoding
+function preAuthEncoding(payloadType, payload) {
+    const prefix = Buffer.from(`${PAE_PREFIX} ${payloadType.length} ${payloadType} ${payload.length} `, 'ascii');
+    return Buffer.concat([prefix, payload]);
+}
+exports.preAuthEncoding = preAuthEncoding;
diff --git a/node_modules/sigstore/dist/util/encoding.d.ts b/node_modules/sigstore/dist/util/encoding.d.ts
new file mode 100644
index 0000000000000..f1347c241ed0c
--- /dev/null
+++ b/node_modules/sigstore/dist/util/encoding.d.ts
@@ -0,0 +1,6 @@
+export declare function base64Encode(str: string): string;
+export declare function base64Decode(str: string): string;
+export declare function base64URLEncode(str: string): string;
+export declare function base64URLDecode(str: string): string;
+export declare function base64URLEscape(str: string): string;
+export declare function base64URLUnescape(str: string): string;
diff --git a/node_modules/sigstore/dist/util/encoding.js b/node_modules/sigstore/dist/util/encoding.js
new file mode 100644
index 0000000000000..8674d779b61f9
--- /dev/null
+++ b/node_modules/sigstore/dist/util/encoding.js
@@ -0,0 +1,46 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.base64URLUnescape = exports.base64URLEscape = exports.base64URLDecode = exports.base64URLEncode = exports.base64Decode = exports.base64Encode = void 0;
+/*
+Copyright 2022 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const BASE64_ENCODING = 'base64';
+const UTF8_ENCODING = 'utf-8';
+function base64Encode(str) {
+    return Buffer.from(str, UTF8_ENCODING).toString(BASE64_ENCODING);
+}
+exports.base64Encode = base64Encode;
+function base64Decode(str) {
+    return Buffer.from(str, BASE64_ENCODING).toString(UTF8_ENCODING);
+}
+exports.base64Decode = base64Decode;
+function base64URLEncode(str) {
+    return base64URLEscape(base64Encode(str));
+}
+exports.base64URLEncode = base64URLEncode;
+function base64URLDecode(str) {
+    return base64Decode(base64URLUnescape(str));
+}
+exports.base64URLDecode = base64URLDecode;
+function base64URLEscape(str) {
+    return str.replace(/\+/g, '-').replace(/\//g, '_').replace(/=/g, '');
+}
+exports.base64URLEscape = base64URLEscape;
+function base64URLUnescape(str) {
+    // Repad the base64 string if necessary
+    str += '='.repeat((4 - (str.length % 4)) % 4);
+    return str.replace(/-/g, '+').replace(/_/g, '/');
+}
+exports.base64URLUnescape = base64URLUnescape;
diff --git a/node_modules/sigstore/dist/util/index.d.ts b/node_modules/sigstore/dist/util/index.d.ts
new file mode 100644
index 0000000000000..786a19630cd60
--- /dev/null
+++ b/node_modules/sigstore/dist/util/index.d.ts
@@ -0,0 +1,8 @@
+export * as crypto from './crypto';
+export * as dsse from './dsse';
+export * as encoding from './encoding';
+export * as json from './json';
+export * as oidc from './oidc';
+export * as pem from './pem';
+export * as promise from './promise';
+export * as ua from './ua';
diff --git a/node_modules/sigstore/dist/util/index.js b/node_modules/sigstore/dist/util/index.js
new file mode 100644
index 0000000000000..2c02116cbf07d
--- /dev/null
+++ b/node_modules/sigstore/dist/util/index.js
@@ -0,0 +1,49 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ua = exports.promise = exports.pem = exports.oidc = exports.json = exports.encoding = exports.dsse = exports.crypto = void 0;
+/*
+Copyright 2022 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+exports.crypto = __importStar(require("./crypto"));
+exports.dsse = __importStar(require("./dsse"));
+exports.encoding = __importStar(require("./encoding"));
+exports.json = __importStar(require("./json"));
+exports.oidc = __importStar(require("./oidc"));
+exports.pem = __importStar(require("./pem"));
+exports.promise = __importStar(require("./promise"));
+exports.ua = __importStar(require("./ua"));
diff --git a/node_modules/sigstore/dist/util/json.d.ts b/node_modules/sigstore/dist/util/json.d.ts
new file mode 100644
index 0000000000000..ed331817ef236
--- /dev/null
+++ b/node_modules/sigstore/dist/util/json.d.ts
@@ -0,0 +1 @@
+export declare function canonicalize(object: any): string;
diff --git a/node_modules/sigstore/dist/util/json.js b/node_modules/sigstore/dist/util/json.js
new file mode 100644
index 0000000000000..69176ad731eb7
--- /dev/null
+++ b/node_modules/sigstore/dist/util/json.js
@@ -0,0 +1,61 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.canonicalize = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+// JSON canonicalization per https://github.com/cyberphone/json-canonicalization
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+function canonicalize(object) {
+    let buffer = '';
+    if (object === null || typeof object !== 'object' || object.toJSON != null) {
+        // Primitives or toJSONable objects
+        buffer += JSON.stringify(object);
+    }
+    else if (Array.isArray(object)) {
+        // Array - maintain element order
+        buffer += '[';
+        let first = true;
+        object.forEach((element) => {
+            if (!first) {
+                buffer += ',';
+            }
+            first = false;
+            // recursive call
+            buffer += canonicalize(element);
+        });
+        buffer += ']';
+    }
+    else {
+        // Object - Sort properties before serializing
+        buffer += '{';
+        let first = true;
+        Object.keys(object)
+            .sort()
+            .forEach((property) => {
+            if (!first) {
+                buffer += ',';
+            }
+            first = false;
+            buffer += JSON.stringify(property);
+            buffer += ':';
+            // recursive call
+            buffer += canonicalize(object[property]);
+        });
+        buffer += '}';
+    }
+    return buffer;
+}
+exports.canonicalize = canonicalize;
diff --git a/node_modules/sigstore/dist/util/oidc.d.ts b/node_modules/sigstore/dist/util/oidc.d.ts
new file mode 100644
index 0000000000000..b4513891a3527
--- /dev/null
+++ b/node_modules/sigstore/dist/util/oidc.d.ts
@@ -0,0 +1 @@
+export declare function extractJWTSubject(jwt: string): string;
diff --git a/node_modules/sigstore/dist/util/oidc.js b/node_modules/sigstore/dist/util/oidc.js
new file mode 100644
index 0000000000000..05af90d09ae68
--- /dev/null
+++ b/node_modules/sigstore/dist/util/oidc.js
@@ -0,0 +1,54 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.extractJWTSubject = void 0;
+/*
+Copyright 2022 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const enc = __importStar(require("./encoding"));
+function extractJWTSubject(jwt) {
+    const parts = jwt.split('.', 3);
+    const payload = JSON.parse(enc.base64Decode(parts[1]));
+    switch (payload.iss) {
+        case 'https://accounts.google.com':
+        case 'https://oauth2.sigstore.dev/auth':
+            return payload.email;
+        default:
+            return payload.sub;
+    }
+}
+exports.extractJWTSubject = extractJWTSubject;
diff --git a/node_modules/sigstore/dist/util/pem.d.ts b/node_modules/sigstore/dist/util/pem.d.ts
new file mode 100644
index 0000000000000..cb438c7937551
--- /dev/null
+++ b/node_modules/sigstore/dist/util/pem.d.ts
@@ -0,0 +1,4 @@
+/// <reference types="node" />
+export declare function split(certificate: string): string[];
+export declare function toDER(certificate: string): Buffer;
+export declare function fromDER(certificate: Buffer, type?: string): string;
diff --git a/node_modules/sigstore/dist/util/pem.js b/node_modules/sigstore/dist/util/pem.js
new file mode 100644
index 0000000000000..6bb8eda62a2ef
--- /dev/null
+++ b/node_modules/sigstore/dist/util/pem.js
@@ -0,0 +1,65 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.fromDER = exports.toDER = exports.split = void 0;
+/*
+Copyright 2022 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const PEM_HEADER = /-----BEGIN (.*)-----/;
+const PEM_FOOTER = /-----END (.*)-----/;
+// Given a set of PEM-encoded certificates bundled in a single string, returns
+// an array of certificates. Standard PEM encoding dictates that each certificate
+// should have a trailing newline after the footer.
+function split(certificate) {
+    const certs = [];
+    let cert = [];
+    certificate.split('\n').forEach((line) => {
+        line.includes;
+        if (line.match(PEM_HEADER)) {
+            cert = [];
+        }
+        if (line.length > 0) {
+            cert.push(line);
+        }
+        if (line.match(PEM_FOOTER)) {
+            certs.push(cert.join('\n').concat('\n'));
+        }
+    });
+    return certs;
+}
+exports.split = split;
+function toDER(certificate) {
+    let der = '';
+    certificate.split('\n').forEach((line) => {
+        if (line.match(PEM_HEADER) || line.match(PEM_FOOTER)) {
+            return;
+        }
+        der += line;
+    });
+    return Buffer.from(der, 'base64');
+}
+exports.toDER = toDER;
+// Translates a DER-encoded buffer into a PEM-encoded string. Standard PEM
+// encoding dictates that each certificate should have a trailing newline after
+// the footer.
+function fromDER(certificate, type = 'CERTIFICATE') {
+    // Base64-encode the certificate.
+    const der = certificate.toString('base64');
+    // Split the certificate into lines of 64 characters.
+    const lines = der.match(/.{1,64}/g) || '';
+    return [`-----BEGIN ${type}-----`, ...lines, `-----END ${type}-----`]
+        .join('\n')
+        .concat('\n');
+}
+exports.fromDER = fromDER;
diff --git a/node_modules/sigstore/dist/util/promise.d.ts b/node_modules/sigstore/dist/util/promise.d.ts
new file mode 100644
index 0000000000000..bbc501a85a7c6
--- /dev/null
+++ b/node_modules/sigstore/dist/util/promise.d.ts
@@ -0,0 +1 @@
+export declare const promiseAny: <T>(values: Iterable<PromiseLike<T>>) => Promise<T>;
diff --git a/node_modules/sigstore/dist/util/promise.js b/node_modules/sigstore/dist/util/promise.js
new file mode 100644
index 0000000000000..8101dd47afe02
--- /dev/null
+++ b/node_modules/sigstore/dist/util/promise.js
@@ -0,0 +1,27 @@
+"use strict";
+/*
+Copyright 2022 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.promiseAny = void 0;
+// Implementation of Promise.any (not available until Node v15).
+// We're basically inverting the logic of Promise.all and taking advantage
+// of the fact that Promise.all will return early on the first rejection.
+// By reversing the resolve/reject logic we can use this to return early
+// on the first resolved promise.
+const promiseAny = async (values) => {
+    return Promise.all([...values].map((promise) => new Promise((resolve, reject) => promise.then(reject, resolve)))).then((errors) => Promise.reject(errors), (value) => Promise.resolve(value));
+};
+exports.promiseAny = promiseAny;
diff --git a/node_modules/sigstore/dist/util/stream.d.ts b/node_modules/sigstore/dist/util/stream.d.ts
new file mode 100644
index 0000000000000..4d509565942e1
--- /dev/null
+++ b/node_modules/sigstore/dist/util/stream.d.ts
@@ -0,0 +1,24 @@
+/// <reference types="node" />
+export declare class StreamError extends Error {
+}
+export declare class ByteStream {
+    private static BLOCK_SIZE;
+    private buf;
+    private view;
+    private start;
+    constructor(buffer?: ArrayBuffer);
+    get buffer(): Buffer;
+    get length(): number;
+    get position(): number;
+    seek(position: number): void;
+    slice(start: number, len: number): Buffer;
+    appendChar(char: number): void;
+    appendUint16(num: number): void;
+    appendUint24(num: number): void;
+    appendView(view: Uint8Array): void;
+    getBlock(size: number): Buffer;
+    getUint8(): number;
+    getUint16(): number;
+    private ensureCapacity;
+    private realloc;
+}
diff --git a/node_modules/sigstore/dist/util/stream.js b/node_modules/sigstore/dist/util/stream.js
new file mode 100644
index 0000000000000..b5c881bb388d4
--- /dev/null
+++ b/node_modules/sigstore/dist/util/stream.js
@@ -0,0 +1,116 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ByteStream = exports.StreamError = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+class StreamError extends Error {
+}
+exports.StreamError = StreamError;
+class ByteStream {
+    constructor(buffer) {
+        this.start = 0;
+        if (buffer) {
+            this.buf = buffer;
+            this.view = Buffer.from(buffer);
+        }
+        else {
+            this.buf = new ArrayBuffer(0);
+            this.view = Buffer.from(this.buf);
+        }
+    }
+    get buffer() {
+        return this.view.subarray(0, this.start);
+    }
+    get length() {
+        return this.view.byteLength;
+    }
+    get position() {
+        return this.start;
+    }
+    seek(position) {
+        this.start = position;
+    }
+    // Returns a Buffer containing the specified number of bytes starting at the
+    // given start position.
+    slice(start, len) {
+        const end = start + len;
+        if (end > this.length) {
+            throw new StreamError('request past end of buffer');
+        }
+        return this.view.subarray(start, end);
+    }
+    appendChar(char) {
+        this.ensureCapacity(1);
+        this.view[this.start] = char;
+        this.start += 1;
+    }
+    appendUint16(num) {
+        this.ensureCapacity(2);
+        const value = new Uint16Array([num]);
+        const view = new Uint8Array(value.buffer);
+        this.view[this.start] = view[1];
+        this.view[this.start + 1] = view[0];
+        this.start += 2;
+    }
+    appendUint24(num) {
+        this.ensureCapacity(3);
+        const value = new Uint32Array([num]);
+        const view = new Uint8Array(value.buffer);
+        this.view[this.start] = view[2];
+        this.view[this.start + 1] = view[1];
+        this.view[this.start + 2] = view[0];
+        this.start += 3;
+    }
+    appendView(view) {
+        this.ensureCapacity(view.length);
+        this.view.set(view, this.start);
+        this.start += view.length;
+    }
+    getBlock(size) {
+        if (size <= 0) {
+            return Buffer.alloc(0);
+        }
+        if (this.start + size > this.view.length) {
+            throw new Error('request past end of buffer');
+        }
+        const result = this.view.subarray(this.start, this.start + size);
+        this.start += size;
+        return result;
+    }
+    getUint8() {
+        return this.getBlock(1)[0];
+    }
+    getUint16() {
+        const block = this.getBlock(2);
+        return (block[0] << 8) | block[1];
+    }
+    ensureCapacity(size) {
+        if (this.start + size > this.view.byteLength) {
+            const blockSize = ByteStream.BLOCK_SIZE + (size > ByteStream.BLOCK_SIZE ? size : 0);
+            this.realloc(this.view.byteLength + blockSize);
+        }
+    }
+    realloc(size) {
+        const newArray = new ArrayBuffer(size);
+        const newView = Buffer.from(newArray);
+        // Copy the old buffer into the new one
+        newView.set(this.view);
+        this.buf = newArray;
+        this.view = newView;
+    }
+}
+exports.ByteStream = ByteStream;
+ByteStream.BLOCK_SIZE = 1024;
diff --git a/node_modules/sigstore/dist/util/ua.d.ts b/node_modules/sigstore/dist/util/ua.d.ts
new file mode 100644
index 0000000000000..b60e2e9c3e537
--- /dev/null
+++ b/node_modules/sigstore/dist/util/ua.d.ts
@@ -0,0 +1 @@
+export declare const getUserAgent: () => string;
diff --git a/node_modules/sigstore/dist/util/ua.js b/node_modules/sigstore/dist/util/ua.js
new file mode 100644
index 0000000000000..6db6b5a2723db
--- /dev/null
+++ b/node_modules/sigstore/dist/util/ua.js
@@ -0,0 +1,33 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.getUserAgent = void 0;
+/*
+Copyright 2022 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const os_1 = __importDefault(require("os"));
+// Format User-Agent: <product> / <product-version> (<platform>)
+// source: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/User-Agent
+const getUserAgent = () => {
+    // eslint-disable-next-line @typescript-eslint/no-var-requires
+    const packageVersion = require('../../package.json').version;
+    const nodeVersion = process.version;
+    const platformName = os_1.default.platform();
+    const archName = os_1.default.arch();
+    return `sigstore-js/${packageVersion} (Node ${nodeVersion}) (${platformName}/${archName})`;
+};
+exports.getUserAgent = getUserAgent;
diff --git a/node_modules/sigstore/dist/verify.d.ts b/node_modules/sigstore/dist/verify.d.ts
new file mode 100644
index 0000000000000..819d0dadf1be5
--- /dev/null
+++ b/node_modules/sigstore/dist/verify.d.ts
@@ -0,0 +1,13 @@
+/// <reference types="node" />
+import * as sigstore from './types/sigstore';
+export type KeySelector = (hint: string) => string | Buffer | undefined;
+export declare class Verifier {
+    private trustedRoot;
+    private keySelector;
+    constructor(trustedRoot: sigstore.TrustedRoot, keySelector?: KeySelector);
+    verify(bundle: sigstore.ValidBundle, options: sigstore.RequiredArtifactVerificationOptions, data?: Buffer): void;
+    private verifyArtifactSignature;
+    private verifySigningCertificate;
+    private verifyTLogEntries;
+    private getPublicKey;
+}
diff --git a/node_modules/sigstore/dist/verify.js b/node_modules/sigstore/dist/verify.js
new file mode 100644
index 0000000000000..1bcef03b5f7ba
--- /dev/null
+++ b/node_modules/sigstore/dist/verify.js
@@ -0,0 +1,142 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Verifier = void 0;
+const ca = __importStar(require("./ca/verify"));
+const error_1 = require("./error");
+const tlog = __importStar(require("./tlog/verify"));
+const sigstore = __importStar(require("./types/sigstore"));
+const util_1 = require("./util");
+class Verifier {
+    constructor(trustedRoot, keySelector) {
+        this.trustedRoot = trustedRoot;
+        this.keySelector = keySelector || (() => undefined);
+    }
+    // Verifies the bundle signature, the bundle's certificate chain (if present)
+    // and the bundle's transparency log entries.
+    verify(bundle, options, data) {
+        this.verifyArtifactSignature(bundle, options, data);
+        if (sigstore.isBundleWithCertificateChain(bundle)) {
+            this.verifySigningCertificate(bundle, options);
+        }
+        this.verifyTLogEntries(bundle, options);
+    }
+    // Performs bundle signature verification. Determines the type of the bundle
+    // content and delegates to the appropriate signature verification function.
+    verifyArtifactSignature(bundle, options, data) {
+        const publicKey = this.getPublicKey(bundle, options);
+        switch (bundle.content?.$case) {
+            case 'messageSignature':
+                if (!data) {
+                    throw new error_1.VerificationError('no data provided for message signature verification');
+                }
+                verifyMessageSignature(data, bundle.content.messageSignature, publicKey);
+                break;
+            case 'dsseEnvelope':
+                verifyDSSESignature(bundle.content.dsseEnvelope, publicKey);
+                break;
+        }
+    }
+    // Performs verification of the bundle's certificate chain. The bundle must
+    // contain a certificate chain and the options must contain the required
+    // options for CA verification.
+    // TODO: We've temporarily removed the requirement that the options contain
+    // the list of trusted signer identities. This will be added back in a future
+    // release.
+    verifySigningCertificate(bundle, options) {
+        if (!sigstore.isCAVerificationOptions(options)) {
+            throw new error_1.VerificationError('no trusted certificates provided for verification');
+        }
+        ca.verifySigningCertificate(bundle, this.trustedRoot, options);
+    }
+    // Performs verification of the bundle's transparency log entries. The bundle
+    // must contain a list of transparency log entries.
+    verifyTLogEntries(bundle, options) {
+        tlog.verifyTLogEntries(bundle, this.trustedRoot, options.tlogOptions);
+    }
+    // Returns the public key which will be used to verify the bundle signature.
+    // The public key is selected based on the verification material in the bundle
+    // and the options provided.
+    getPublicKey(bundle, options) {
+        // Select the key which will be used to verify the signature
+        switch (bundle.verificationMaterial?.content?.$case) {
+            // If the bundle contains a certificate chain, the public key is the
+            // first certificate in the chain (the signing certificate)
+            case 'x509CertificateChain':
+                return getPublicKeyFromCertificateChain(bundle.verificationMaterial.content.x509CertificateChain);
+            // If the bundle contains a public key hint, the public key is selected
+            // from the list of trusted keys in the options
+            case 'publicKey':
+                return getPublicKeyFromHint(bundle.verificationMaterial.content.publicKey, options, this.keySelector);
+        }
+    }
+}
+exports.Verifier = Verifier;
+// Retrieves the public key from the first certificate in the certificate chain
+function getPublicKeyFromCertificateChain(certificateChain) {
+    const cert = util_1.pem.fromDER(certificateChain.certificates[0].rawBytes);
+    return util_1.crypto.createPublicKey(cert);
+}
+// Retrieves the public key through the key selector callback, passing the
+// public key hint from the bundle
+function getPublicKeyFromHint(publicKeyID, options, keySelector) {
+    const key = keySelector(publicKeyID.hint);
+    if (!key) {
+        throw new error_1.VerificationError('no public key found for signature verification');
+    }
+    try {
+        return util_1.crypto.createPublicKey(key);
+    }
+    catch (e) {
+        throw new error_1.VerificationError('invalid public key');
+    }
+}
+// Performs signature verification for bundle containing a message signature.
+// Verifies that the digest and signature found in the bundle match the
+// provided data.
+function verifyMessageSignature(data, messageSignature, publicKey) {
+    // Extract signature for message
+    const { signature, messageDigest } = messageSignature;
+    const calculatedDigest = util_1.crypto.hash(data);
+    if (!calculatedDigest.equals(messageDigest.digest)) {
+        throw new error_1.VerificationError('message digest verification failed');
+    }
+    if (!util_1.crypto.verifyBlob(data, publicKey, signature)) {
+        throw new error_1.VerificationError('artifact signature verification failed');
+    }
+}
+// Performs signature verification for bundle containing a DSSE envelope.
+// Calculates the PAE for the DSSE envelope and verifies it against the
+// signature in the envelope.
+function verifyDSSESignature(envelope, publicKey) {
+    // Construct payload over which the signature was originally created
+    const { payloadType, payload } = envelope;
+    const data = util_1.dsse.preAuthEncoding(payloadType, payload);
+    // Only support a single signature in DSSE
+    const signature = envelope.signatures[0].sig;
+    if (!util_1.crypto.verifyBlob(data, publicKey, signature)) {
+        throw new error_1.VerificationError('artifact signature verification failed');
+    }
+}
diff --git a/node_modules/sigstore/dist/x509/asn1/dump.d.ts b/node_modules/sigstore/dist/x509/asn1/dump.d.ts
new file mode 100644
index 0000000000000..3f192dea45445
--- /dev/null
+++ b/node_modules/sigstore/dist/x509/asn1/dump.d.ts
@@ -0,0 +1,2 @@
+import { ASN1Obj } from './obj';
+export declare function dump(obj: ASN1Obj, indent?: number): void;
diff --git a/node_modules/sigstore/dist/x509/asn1/dump.js b/node_modules/sigstore/dist/x509/asn1/dump.js
new file mode 100644
index 0000000000000..b44605455ba00
--- /dev/null
+++ b/node_modules/sigstore/dist/x509/asn1/dump.js
@@ -0,0 +1,97 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.dump = void 0;
+const tag_1 = require("./tag");
// Utility function to dump the contents of an ASN1Obj to the console.
// Prints one line per node: tag name followed by a short value summary,
// indenting two extra spaces per nesting level.
function dump(obj, indent = 0) {
    console.log(' '.repeat(indent) + tagToString(obj.tag) + ' ' + formatValue(obj));
    // Recursive call for children
    for (const sub of obj.subs) {
        dump(sub, indent + 2);
    }
}
// Renders a one-line summary of the object's value appropriate to its tag.
function formatValue(obj) {
    const { tag, value, subs } = obj;
    if (!tag.isUniversal()) {
        // Non-universal tags: show child count when constructed, otherwise
        // the raw bytes as ASCII (when printable) or uppercase hex.
        if (tag.constructed) {
            return `(${subs.length} elem) `;
        }
        return (`(${value.length} byte) ` +
            (isASCII(value)
                ? value.toString('ascii')
                : value.toString('hex').toUpperCase()));
    }
    switch (tag.number) {
        case tag_1.UNIVERSAL_TAG.BOOLEAN:
            return `${obj.toBoolean()}`;
        case tag_1.UNIVERSAL_TAG.INTEGER:
            return `(${value.length} byte) ${obj.toInteger()}`;
        case tag_1.UNIVERSAL_TAG.BIT_STRING: {
            const bits = obj.toBitString();
            return `(${bits.length} bit) ${truncate(bits.join(''))}`;
        }
        case tag_1.UNIVERSAL_TAG.OBJECT_IDENTIFIER:
            return obj.toOID();
        case tag_1.UNIVERSAL_TAG.SEQUENCE:
        case tag_1.UNIVERSAL_TAG.SET:
            return `(${subs.length} elem) `;
        case tag_1.UNIVERSAL_TAG.PRINTABLE_STRING:
            return value.toString('ascii');
        case tag_1.UNIVERSAL_TAG.UTC_TIME:
        case tag_1.UNIVERSAL_TAG.GENERALIZED_TIME:
            return obj.toDate().toUTCString();
        default:
            // Unhandled universal tags: byte count plus ASCII or truncated hex.
            return (`(${value.length} byte) ` +
                (isASCII(value)
                    ? value.toString('ascii')
                    : truncate(value.toString('hex').toUpperCase())));
    }
}
+exports.dump = dump;
// Universal tag numbers mapped to their conventional display names.
const TAG_NAMES = {
    [tag_1.UNIVERSAL_TAG.BOOLEAN]: 'BOOLEAN',
    [tag_1.UNIVERSAL_TAG.INTEGER]: 'INTEGER',
    [tag_1.UNIVERSAL_TAG.BIT_STRING]: 'BIT STRING',
    [tag_1.UNIVERSAL_TAG.OCTET_STRING]: 'OCTET STRING',
    [tag_1.UNIVERSAL_TAG.OBJECT_IDENTIFIER]: 'OBJECT IDENTIFIER',
    [tag_1.UNIVERSAL_TAG.SEQUENCE]: 'SEQUENCE',
    [tag_1.UNIVERSAL_TAG.SET]: 'SET',
    [tag_1.UNIVERSAL_TAG.PRINTABLE_STRING]: 'PrintableString',
    [tag_1.UNIVERSAL_TAG.UTC_TIME]: 'UTCTime',
    [tag_1.UNIVERSAL_TAG.GENERALIZED_TIME]: 'GeneralizedTime',
};
// Renders a tag for display: context-specific tags as "[n]" (hex), known
// tag numbers by name, and anything else as a bare hex number.
function tagToString(tag) {
    if (tag.isContextSpecific()) {
        return `[${tag.number.toString(16)}]`;
    }
    const name = TAG_NAMES[tag.number];
    return name !== undefined ? name : tag.number.toString(16);
}
// Returns true when every byte falls in the printable ASCII range
// (0x20 space through 0x7E tilde). An empty buffer counts as ASCII.
function isASCII(buf) {
    for (const byte of buf) {
        if (byte < 0x20 || byte > 0x7e) {
            return false;
        }
    }
    return true;
}
// Caps a string at 70 characters, replacing the tail with "..." when longer.
function truncate(str) {
    if (str.length <= 70) {
        return str;
    }
    return `${str.substring(0, 69)}...`;
}
diff --git a/node_modules/sigstore/dist/x509/asn1/error.d.ts b/node_modules/sigstore/dist/x509/asn1/error.d.ts
new file mode 100644
index 0000000000000..fcd908f47036a
--- /dev/null
+++ b/node_modules/sigstore/dist/x509/asn1/error.d.ts
@@ -0,0 +1,4 @@
+export declare class ASN1ParseError extends Error {
+}
+export declare class ASN1TypeError extends Error {
+}
diff --git a/node_modules/sigstore/dist/x509/asn1/error.js b/node_modules/sigstore/dist/x509/asn1/error.js
new file mode 100644
index 0000000000000..17d93b0f7e706
--- /dev/null
+++ b/node_modules/sigstore/dist/x509/asn1/error.js
@@ -0,0 +1,24 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ASN1TypeError = exports.ASN1ParseError = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
// Thrown when a byte stream cannot be decoded as ASN.1 (bad tag byte,
// bad or unsupported length encoding, truncated input, etc.).
class ASN1ParseError extends Error {
}
exports.ASN1ParseError = ASN1ParseError;
// Thrown when an ASN.1 value is coerced to a JS type that does not match
// its tag (e.g. reading a boolean from a non-BOOLEAN element).
class ASN1TypeError extends Error {
}
exports.ASN1TypeError = ASN1TypeError;
diff --git a/node_modules/sigstore/dist/x509/asn1/length.d.ts b/node_modules/sigstore/dist/x509/asn1/length.d.ts
new file mode 100644
index 0000000000000..b9c2a2f3fcfcf
--- /dev/null
+++ b/node_modules/sigstore/dist/x509/asn1/length.d.ts
@@ -0,0 +1,4 @@
+/// <reference types="node" />
+import { ByteStream } from '../../util/stream';
+export declare function decodeLength(stream: ByteStream): number;
+export declare function encodeLength(len: number): Buffer;
diff --git a/node_modules/sigstore/dist/x509/asn1/length.js b/node_modules/sigstore/dist/x509/asn1/length.js
new file mode 100644
index 0000000000000..36fdaf5b9777f
--- /dev/null
+++ b/node_modules/sigstore/dist/x509/asn1/length.js
@@ -0,0 +1,63 @@
+"use strict";
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.encodeLength = exports.decodeLength = void 0;
+const error_1 = require("./error");
// Decodes the length of a DER-encoded ASN.1 element from the supplied stream.
// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-encoded-length-and-value-bytes
function decodeLength(stream) {
    const first = stream.getUint8();
    // Short form: when the most significant bit is unset, the byte itself
    // is the length.
    if ((first & 0x80) === 0x00) {
        return first;
    }
    // Long form: the low 7 bits give the number of subsequent length bytes.
    const byteCount = first & 0x7f;
    // Cap at 6 bytes (48 bits) so the decoded length stays a safe JS number.
    if (byteCount > 6) {
        throw new error_1.ASN1ParseError('length exceeds 6 byte limit');
    }
    // Accumulate the big-endian length bytes.
    let length = 0;
    for (let i = 0; i < byteCount; i++) {
        length = length * 256 + stream.getUint8();
    }
    // A zero here covers the indefinite-length form (first byte 0x80), which
    // is valid ASN.1 but not supported by this parser.
    if (length === 0) {
        throw new error_1.ASN1ParseError('indefinite length encoding not supported');
    }
    return length;
}
+exports.decodeLength = decodeLength;
// Encodes a length using DER rules: one byte for values below 128 (short
// form), otherwise a prefix byte (0x80 | byte-count) followed by the length
// as big-endian bytes (long form).
function encodeLength(len) {
    if (len < 128) {
        return Buffer.from([len]);
    }
    // Peel off base-256 digits via BigInt so lengths beyond 32 bits survive
    // the bitwise arithmetic.
    const digits = [];
    for (let remaining = BigInt(len); remaining > 0n; remaining >>= 8n) {
        digits.unshift(Number(remaining & 255n));
    }
    return Buffer.from([0x80 | digits.length, ...digits]);
}
+exports.encodeLength = encodeLength;
diff --git a/node_modules/sigstore/dist/x509/asn1/obj.d.ts b/node_modules/sigstore/dist/x509/asn1/obj.d.ts
new file mode 100644
index 0000000000000..7f70a0ac3047b
--- /dev/null
+++ b/node_modules/sigstore/dist/x509/asn1/obj.d.ts
@@ -0,0 +1,18 @@
+/// <reference types="node" />
+import { ASN1Tag } from './tag';
+export declare class ASN1Obj {
+    readonly tag: ASN1Tag;
+    readonly subs: ASN1Obj[];
+    private buf;
+    private headerLength;
+    constructor(tag: ASN1Tag, headerLength: number, buf: Buffer, subs: ASN1Obj[]);
+    static parseBuffer(buf: Buffer): ASN1Obj;
+    get value(): Buffer;
+    get raw(): Buffer;
+    toDER(): Buffer;
+    toBoolean(): boolean;
+    toInteger(): bigint;
+    toOID(): string;
+    toDate(): Date;
+    toBitString(): number[];
+}
diff --git a/node_modules/sigstore/dist/x509/asn1/obj.js b/node_modules/sigstore/dist/x509/asn1/obj.js
new file mode 100644
index 0000000000000..9e67edcf60a0c
--- /dev/null
+++ b/node_modules/sigstore/dist/x509/asn1/obj.js
@@ -0,0 +1,166 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ASN1Obj = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const stream_1 = require("../../util/stream");
+const error_1 = require("./error");
+const length_1 = require("./length");
+const parse_1 = require("./parse");
+const tag_1 = require("./tag");
// One node of a parsed DER structure: its tag, the complete raw encoding,
// and any parsed child nodes.
class ASN1Obj {
    constructor(tag, headerLength, buf, subs) {
        this.tag = tag;
        this.headerLength = headerLength;
        this.buf = buf;
        this.subs = subs;
    }
    // Constructs an ASN.1 object from a Buffer of DER-encoded bytes.
    static parseBuffer(buf) {
        return parseStream(new stream_1.ByteStream(buf));
    }
    // The value bytes (everything after the tag/length header). Use the
    // to* helpers below to interpret them as a specific type.
    get value() {
        return this.buf.subarray(this.headerLength);
    }
    // The complete encoding of this node: tag, length, and value bytes.
    get raw() {
        return this.buf;
    }
    // Re-serializes the node to DER. When children are present their
    // serializations replace the stored value bytes.
    toDER() {
        const body = new stream_1.ByteStream();
        if (this.subs.length === 0) {
            body.appendView(this.value);
        }
        else {
            this.subs.forEach((sub) => body.appendView(sub.toDER()));
        }
        const encodedValue = body.buffer;
        // Assemble tag + length + value.
        const out = new stream_1.ByteStream();
        out.appendChar(this.tag.toDER());
        out.appendView((0, length_1.encodeLength)(encodedValue.length));
        out.appendView(encodedValue);
        return out.buffer;
    }
    /////////////////////////////////////////////////////////////////////////////
    // Convenience methods for parsing ASN.1 primitives into JS types
    // Interprets the value as a BOOLEAN; throws ASN1TypeError for any other tag.
    toBoolean() {
        if (!this.tag.isBoolean()) {
            throw new error_1.ASN1TypeError('not a boolean');
        }
        return (0, parse_1.parseBoolean)(this.value);
    }
    // Interprets the value as an INTEGER (BigInt); throws ASN1TypeError otherwise.
    toInteger() {
        if (!this.tag.isInteger()) {
            throw new error_1.ASN1TypeError('not an integer');
        }
        return (0, parse_1.parseInteger)(this.value);
    }
    // Interprets the value as an OBJECT IDENTIFIER string; throws otherwise.
    toOID() {
        if (!this.tag.isOID()) {
            throw new error_1.ASN1TypeError('not an OID');
        }
        return (0, parse_1.parseOID)(this.value);
    }
    // Interprets the value as a Date; accepts both UTCTime and
    // GeneralizedTime tags and throws ASN1TypeError for anything else.
    toDate() {
        if (this.tag.isUTCTime()) {
            return (0, parse_1.parseTime)(this.value, true);
        }
        if (this.tag.isGeneralizedTime()) {
            return (0, parse_1.parseTime)(this.value, false);
        }
        throw new error_1.ASN1TypeError('not a date');
    }
    // Interprets the value as a BIT STRING, returned as an array of 0/1
    // values (one per bit); throws ASN1TypeError otherwise.
    toBitString() {
        if (!this.tag.isBitString()) {
            throw new error_1.ASN1TypeError('not a bit string');
        }
        return (0, parse_1.parseBitString)(this.value);
    }
}
+exports.ASN1Obj = ASN1Obj;
+/////////////////////////////////////////////////////////////////////////////
+// Internal stream parsing functions
// Reads one ASN.1 object (and, recursively, its children) from the stream,
// leaving the stream cursor positioned immediately after the object.
function parseStream(stream) {
    // Capture current stream position so we know where this object starts
    const startPos = stream.position;
    // Parse tag and length from stream
    const tag = new tag_1.ASN1Tag(stream.getUint8());
    const len = (0, length_1.decodeLength)(stream);
    // Calculate length of header (tag + length)
    const header = stream.position - startPos;
    let subs = [];
    // If the object is constructed, parse its children. Sometimes, children
    // are embedded in OCTET STRING objects, so we need to check those
    // for children as well.
    if (tag.constructed) {
        subs = collectSubs(stream, len);
    }
    else if (tag.isOctetString()) {
        // Attempt to parse children of OCTET STRING objects. If anything fails,
        // assume the object is not constructed and treat as primitive.
        try {
            subs = collectSubs(stream, len);
        }
        catch (e) {
            // Fail silently and treat as primitive
        }
    }
    // If there are no children, move stream cursor to the end of the object.
    // (collectSubs only leaves the cursor at the end when children were
    // successfully parsed; this also rewinds any partial OCTET STRING parse.)
    if (subs.length === 0) {
        stream.seek(startPos + header + len);
    }
    // Capture the raw bytes of the object (including tag, length, and value)
    const buf = stream.slice(startPos, header + len);
    return new ASN1Obj(tag, header, buf, subs);
}
// Parses consecutive child objects from the stream until exactly `len`
// bytes have been consumed. Throws ASN1ParseError when the declared length
// does not line up with the encoded children.
function collectSubs(stream, len) {
    // The children must end exactly at this stream position.
    const end = stream.position + len;
    if (end > stream.length) {
        throw new error_1.ASN1ParseError('invalid length');
    }
    const subs = [];
    while (stream.position < end) {
        subs.push(parseStream(stream));
    }
    // Overshooting the boundary means the declared length was wrong.
    if (stream.position !== end) {
        throw new error_1.ASN1ParseError('invalid length');
    }
    return subs;
}
diff --git a/node_modules/sigstore/dist/x509/asn1/parse.d.ts b/node_modules/sigstore/dist/x509/asn1/parse.d.ts
new file mode 100644
index 0000000000000..35989d5510e26
--- /dev/null
+++ b/node_modules/sigstore/dist/x509/asn1/parse.d.ts
@@ -0,0 +1,7 @@
+/// <reference types="node" />
+export declare function parseInteger(buf: Buffer): bigint;
+export declare function parseStringASCII(buf: Buffer): string;
+export declare function parseTime(buf: Buffer, shortYear: boolean): Date;
+export declare function parseOID(buf: Buffer): string;
+export declare function parseBoolean(buf: Buffer): boolean;
+export declare function parseBitString(buf: Buffer): number[];
diff --git a/node_modules/sigstore/dist/x509/asn1/parse.js b/node_modules/sigstore/dist/x509/asn1/parse.js
new file mode 100644
index 0000000000000..ad50a8f1abdc2
--- /dev/null
+++ b/node_modules/sigstore/dist/x509/asn1/parse.js
@@ -0,0 +1,125 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.parseBitString = exports.parseBoolean = exports.parseOID = exports.parseTime = exports.parseStringASCII = exports.parseInteger = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const RE_TIME_SHORT_YEAR = /^(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})Z$/;
+const RE_TIME_LONG_YEAR = /^(\d{4})(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})Z$/;
// Parse a BigInt from the DER-encoded two's-complement buffer.
// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-integer
function parseInteger(buf) {
    const size = buf.length;
    // The sign is the most significant bit of the first byte.
    const negative = buf[0] > 0x7f;
    // Skip over redundant sign-extension bytes.
    const padding = negative ? 0xff : 0x00;
    let pos = 0;
    while (pos < size && buf[pos] === padding) {
        pos += 1;
    }
    // Nothing but padding: the value is exactly 0 (or -1 when negative).
    if (pos === size) {
        return negative ? -1n : 0n;
    }
    // First significant byte, adjusted for two's complement when negative.
    let result = BigInt(negative ? buf[pos] - 256 : buf[pos]);
    // Fold in the remaining bytes, base 256.
    for (let i = pos + 1; i < size; i++) {
        result = result * 256n + BigInt(buf[i]);
    }
    return result;
}
+exports.parseInteger = parseInteger;
// Decodes the entire buffer as an ASCII string (Node's 'ascii' encoding,
// which masks each byte to 7 bits).
function parseStringASCII(bytes) {
    return bytes.toString('ascii');
}
+exports.parseStringASCII = parseStringASCII;
// Parse a Date from the DER-encoded buffer: UTCTime when shortYear is true
// (2-digit year), GeneralizedTime otherwise (4-digit year).
// https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.5.1
function parseTime(buf, shortYear) {
    const timeStr = buf.toString('ascii');
    // Capture groups: year, month, day, hour, minute, second.
    const pattern = shortYear
        ? /^(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})Z$/
        : /^(\d{4})(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})Z$/;
    const match = pattern.exec(timeStr);
    if (!match) {
        throw new Error('invalid time');
    }
    let [, year, month, day, hour, minute, second] = match;
    if (shortYear) {
        // Per the spec, two-digit years 50-99 map to 19xx and 00-49 to 20xx.
        const num = Number(year);
        year = String(num + (num >= 50 ? 1900 : 2000));
    }
    // Reassemble as ISO 8601 and let Date handle the parsing.
    return new Date(`${year}-${month}-${day}T${hour}:${minute}:${second}Z`);
}
+exports.parseTime = parseTime;
// Parse an OID from the DER-encoded buffer into dotted-decimal form.
// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-object-identifier
//
// Each subidentifier is base-128, most significant byte first, with the high
// bit of every byte except the last acting as a continuation flag. The first
// subidentifier packs the first two OID components as (40 * first) + second,
// where a packed value of 80 or more always means first = 2 (per X.690).
function parseOID(buf) {
    let pos = 0;
    const end = buf.length;
    // Reads one base-128 subidentifier starting at pos.
    const readSubID = () => {
        let val = 0;
        while (pos < end) {
            const byte = buf[pos++];
            // Multiply (not <<) so subidentifiers at or above 2^31 don't
            // overflow 32-bit bitwise arithmetic.
            val = val * 128 + (byte & 0x7f);
            if ((byte & 0x80) === 0) {
                break;
            }
        }
        return val;
    };
    // Unpack the first two OID components from the first subidentifier.
    // Note: this also handles multi-byte first subidentifiers (e.g. 2.999),
    // which a naive single-byte read would decode incorrectly.
    const first = readSubID();
    let oid = first < 80 ? `${Math.floor(first / 40)}.${first % 40}` : `2.${first - 80}`;
    // Remaining subidentifiers map one-to-one onto OID components.
    while (pos < end) {
        oid += `.${readSubID()}`;
    }
    return oid;
}
+exports.parseOID = parseOID;
// Parse a boolean from the DER-encoded buffer: 0x00 is FALSE, any other
// byte value is TRUE.
// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-basic-types#boolean
function parseBoolean(bytes) {
    return bytes[0] !== 0x00;
}
+exports.parseBoolean = parseBoolean;
// Parse a bit string from the DER-encoded buffer into an array of 0/1
// values, most significant bit first.
// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-bit-string
function parseBitString(buf) {
    // The leading byte counts how many low-order bits of the FINAL content
    // byte are padding and must be skipped.
    const unusedBits = buf[0];
    const lastIndex = buf.length - 1;
    const bits = [];
    for (let i = 1; i < buf.length; i++) {
        const byte = buf[i];
        // The padding applies only to the last byte.
        const stopBit = i === lastIndex ? unusedBits : 0;
        // Walk the byte from most significant bit down to the stop bit.
        for (let j = 7; j >= stopBit; j--) {
            bits.push((byte >> j) & 0x01);
        }
    }
    return bits;
}
+exports.parseBitString = parseBitString;
diff --git a/node_modules/sigstore/dist/x509/asn1/tag.d.ts b/node_modules/sigstore/dist/x509/asn1/tag.d.ts
new file mode 100644
index 0000000000000..cdc9a69097b38
--- /dev/null
+++ b/node_modules/sigstore/dist/x509/asn1/tag.d.ts
@@ -0,0 +1,28 @@
+export declare const UNIVERSAL_TAG: {
+    BOOLEAN: number;
+    INTEGER: number;
+    BIT_STRING: number;
+    OCTET_STRING: number;
+    OBJECT_IDENTIFIER: number;
+    SEQUENCE: number;
+    SET: number;
+    PRINTABLE_STRING: number;
+    UTC_TIME: number;
+    GENERALIZED_TIME: number;
+};
+export declare class ASN1Tag {
+    readonly number: number;
+    readonly constructed: boolean;
+    readonly class: number;
+    constructor(enc: number);
+    isUniversal(): boolean;
+    isContextSpecific(num?: number): boolean;
+    isBoolean(): boolean;
+    isInteger(): boolean;
+    isBitString(): boolean;
+    isOctetString(): boolean;
+    isOID(): boolean;
+    isUTCTime(): boolean;
+    isGeneralizedTime(): boolean;
+    toDER(): number;
+}
diff --git a/node_modules/sigstore/dist/x509/asn1/tag.js b/node_modules/sigstore/dist/x509/asn1/tag.js
new file mode 100644
index 0000000000000..ecd4fd05362bb
--- /dev/null
+++ b/node_modules/sigstore/dist/x509/asn1/tag.js
@@ -0,0 +1,86 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ASN1Tag = exports.UNIVERSAL_TAG = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const error_1 = require("./error");
// Tag numbers for the universal (class 0) ASN.1 types recognized by this
// module.
exports.UNIVERSAL_TAG = {
    BOOLEAN: 0x01,
    INTEGER: 0x02,
    BIT_STRING: 0x03,
    OCTET_STRING: 0x04,
    OBJECT_IDENTIFIER: 0x06,
    SEQUENCE: 0x10,
    SET: 0x11,
    PRINTABLE_STRING: 0x13,
    UTC_TIME: 0x17,
    GENERALIZED_TIME: 0x18,
};
// ASN.1 tag class values (bits 6-7 of the encoded tag byte).
const TAG_CLASS = {
    UNIVERSAL: 0x00,
    APPLICATION: 0x01,
    CONTEXT_SPECIFIC: 0x02,
    PRIVATE: 0x03,
};
// Decodes and models a single DER tag byte.
// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-encoded-tag-bytes
class ASN1Tag {
    constructor(enc) {
        // Bits 0-4: tag number; bit 5: constructed flag; bits 6-7: class.
        this.number = enc & 0x1f;
        this.constructed = (enc & 0x20) === 0x20;
        this.class = enc >> 6;
        // A tag number of 0x1F signals a multi-byte (long form) tag.
        if (this.number === 0x1f) {
            throw new error_1.ASN1ParseError('long form tags not supported');
        }
        if (this.class === TAG_CLASS.UNIVERSAL && this.number === 0x00) {
            throw new error_1.ASN1ParseError('unsupported tag 0x00');
        }
    }
    isUniversal() {
        return this.class === TAG_CLASS.UNIVERSAL;
    }
    // When `num` is given, also requires the tag number to match it.
    isContextSpecific(num) {
        if (this.class !== TAG_CLASS.CONTEXT_SPECIFIC) {
            return false;
        }
        return num === undefined || this.number === num;
    }
    isBoolean() {
        return this.isUniversal() && this.number === exports.UNIVERSAL_TAG.BOOLEAN;
    }
    isInteger() {
        return this.isUniversal() && this.number === exports.UNIVERSAL_TAG.INTEGER;
    }
    isBitString() {
        return this.isUniversal() && this.number === exports.UNIVERSAL_TAG.BIT_STRING;
    }
    isOctetString() {
        return this.isUniversal() && this.number === exports.UNIVERSAL_TAG.OCTET_STRING;
    }
    isOID() {
        return (this.isUniversal() &&
            this.number === exports.UNIVERSAL_TAG.OBJECT_IDENTIFIER);
    }
    isUTCTime() {
        return this.isUniversal() && this.number === exports.UNIVERSAL_TAG.UTC_TIME;
    }
    isGeneralizedTime() {
        return this.isUniversal() && this.number === exports.UNIVERSAL_TAG.GENERALIZED_TIME;
    }
    // Re-encodes the tag as a single DER tag byte.
    toDER() {
        let der = this.number | (this.class << 6);
        if (this.constructed) {
            der |= 0x20;
        }
        return der;
    }
}
+exports.ASN1Tag = ASN1Tag;
diff --git a/node_modules/sigstore/dist/x509/cert.d.ts b/node_modules/sigstore/dist/x509/cert.d.ts
new file mode 100644
index 0000000000000..6f0f2f37d4db4
--- /dev/null
+++ b/node_modules/sigstore/dist/x509/cert.d.ts
@@ -0,0 +1,48 @@
+/// <reference types="node" />
+import * as sigstore from '../types/sigstore';
+import { ASN1Obj } from './asn1/obj';
+import { x509AuthorityKeyIDExtension, x509BasicConstraintsExtension, x509Extension, x509KeyUsageExtension, x509SCTExtension, x509SubjectAlternativeNameExtension, x509SubjectKeyIDExtension } from './ext';
+interface SCTVerificationResult {
+    verified: boolean;
+    logID: Buffer;
+}
+export declare class x509Certificate {
+    root: ASN1Obj;
+    constructor(asn1: ASN1Obj);
+    static parse(cert: Buffer | string): x509Certificate;
+    get tbsCertificate(): ASN1Obj;
+    get version(): string;
+    get notBefore(): Date;
+    get notAfter(): Date;
+    get issuer(): Buffer;
+    get subject(): Buffer;
+    get publicKey(): Buffer;
+    get signatureAlgorithm(): string;
+    get signatureValue(): Buffer;
+    get extensions(): ASN1Obj[];
+    get extKeyUsage(): x509KeyUsageExtension | undefined;
+    get extBasicConstraints(): x509BasicConstraintsExtension | undefined;
+    get extSubjectAltName(): x509SubjectAlternativeNameExtension | undefined;
+    get extAuthorityKeyID(): x509AuthorityKeyIDExtension | undefined;
+    get extSubjectKeyID(): x509SubjectKeyIDExtension | undefined;
+    get extSCT(): x509SCTExtension | undefined;
+    get isCA(): boolean;
+    extension(oid: string): x509Extension | undefined;
+    verify(issuerCertificate?: x509Certificate): boolean;
+    validForDate(date: Date): boolean;
+    equals(other: x509Certificate): boolean;
+    verifySCTs(issuer: x509Certificate, logs: sigstore.TransparencyLogInstance[]): SCTVerificationResult[];
+    private clone;
+    private findExtension;
+    private checkRecognizedExtensions;
+    private get tbsCertificateObj();
+    private get signatureAlgorithmObj();
+    private get signatureValueObj();
+    private get versionObj();
+    private get issuerObj();
+    private get validityObj();
+    private get subjectObj();
+    private get subjectPublicKeyInfoObj();
+    private get extensionsObj();
+}
+export {};
diff --git a/node_modules/sigstore/dist/x509/cert.js b/node_modules/sigstore/dist/x509/cert.js
new file mode 100644
index 0000000000000..55cf22f62e27e
--- /dev/null
+++ b/node_modules/sigstore/dist/x509/cert.js
@@ -0,0 +1,241 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.x509Certificate = void 0;
+const util_1 = require("../util");
+const stream_1 = require("../util/stream");
+const obj_1 = require("./asn1/obj");
+const ext_1 = require("./ext");
+const EXTENSION_OID_SUBJECT_KEY_ID = '2.5.29.14';
+const EXTENSION_OID_KEY_USAGE = '2.5.29.15';
+const EXTENSION_OID_SUBJECT_ALT_NAME = '2.5.29.17';
+const EXTENSION_OID_BASIC_CONSTRAINTS = '2.5.29.19';
+const EXTENSION_OID_AUTHORITY_KEY_ID = '2.5.29.35';
+const EXTENSION_OID_SCT = '1.3.6.1.4.1.11129.2.4.2';
+// List of recognized critical extensions
+// https://www.rfc-editor.org/rfc/rfc5280#section-4.2
+const RECOGNIZED_EXTENSIONS = [
+    EXTENSION_OID_KEY_USAGE,
+    EXTENSION_OID_BASIC_CONSTRAINTS,
+    EXTENSION_OID_SUBJECT_ALT_NAME,
+];
+const ECDSA_SIGNATURE_ALGOS = {
+    '1.2.840.10045.4.3.1': 'sha224',
+    '1.2.840.10045.4.3.2': 'sha256',
+    '1.2.840.10045.4.3.3': 'sha384',
+    '1.2.840.10045.4.3.4': 'sha512',
+};
+class x509Certificate {
+    constructor(asn1) {
+        this.root = asn1;
+        if (!this.checkRecognizedExtensions()) {
+            throw new Error('Certificate contains unrecognized critical extensions');
+        }
+    }
+    static parse(cert) {
+        const der = typeof cert === 'string' ? util_1.pem.toDER(cert) : cert;
+        const asn1 = obj_1.ASN1Obj.parseBuffer(der);
+        return new x509Certificate(asn1);
+    }
+    get tbsCertificate() {
+        return this.tbsCertificateObj;
+    }
+    get version() {
+        // version number is the first element of the version context specific tag
+        const ver = this.versionObj.subs[0].toInteger();
+        return `v${(ver + BigInt(1)).toString()}`;
+    }
+    get notBefore() {
+        // notBefore is the first element of the validity sequence
+        return this.validityObj.subs[0].toDate();
+    }
+    get notAfter() {
+        // notAfter is the second element of the validity sequence
+        return this.validityObj.subs[1].toDate();
+    }
+    get issuer() {
+        return this.issuerObj.value;
+    }
+    get subject() {
+        return this.subjectObj.value;
+    }
+    get publicKey() {
+        return this.subjectPublicKeyInfoObj.raw;
+    }
+    get signatureAlgorithm() {
+        const oid = this.signatureAlgorithmObj.subs[0].toOID();
+        return ECDSA_SIGNATURE_ALGOS[oid];
+    }
+    get signatureValue() {
+        // Signature value is a bit string, so we need to skip the first byte
+        return this.signatureValueObj.value.subarray(1);
+    }
+    get extensions() {
+        // The extension list is the first (and only) element of the extensions
+        // context specific tag
+        const extSeq = this.extensionsObj?.subs[0];
+        return extSeq?.subs || [];
+    }
+    get extKeyUsage() {
+        const ext = this.findExtension(EXTENSION_OID_KEY_USAGE);
+        return ext ? new ext_1.x509KeyUsageExtension(ext) : undefined;
+    }
+    get extBasicConstraints() {
+        const ext = this.findExtension(EXTENSION_OID_BASIC_CONSTRAINTS);
+        return ext ? new ext_1.x509BasicConstraintsExtension(ext) : undefined;
+    }
+    get extSubjectAltName() {
+        const ext = this.findExtension(EXTENSION_OID_SUBJECT_ALT_NAME);
+        return ext ? new ext_1.x509SubjectAlternativeNameExtension(ext) : undefined;
+    }
+    get extAuthorityKeyID() {
+        const ext = this.findExtension(EXTENSION_OID_AUTHORITY_KEY_ID);
+        return ext ? new ext_1.x509AuthorityKeyIDExtension(ext) : undefined;
+    }
+    get extSubjectKeyID() {
+        const ext = this.findExtension(EXTENSION_OID_SUBJECT_KEY_ID);
+        return ext ? new ext_1.x509SubjectKeyIDExtension(ext) : undefined;
+    }
+    get extSCT() {
+        const ext = this.findExtension(EXTENSION_OID_SCT);
+        return ext ? new ext_1.x509SCTExtension(ext) : undefined;
+    }
+    get isCA() {
+        const ca = this.extBasicConstraints?.isCA || false;
+        // If the KeyUsage extension is present, keyCertSign must be set
+        if (this.extKeyUsage) {
+            ca && this.extKeyUsage.keyCertSign;
+        }
+        return ca;
+    }
+    extension(oid) {
+        const ext = this.findExtension(oid);
+        return ext ? new ext_1.x509Extension(ext) : undefined;
+    }
+    verify(issuerCertificate) {
+        // Use the issuer's public key if provided, otherwise use the subject's
+        const publicKey = issuerCertificate?.publicKey || this.publicKey;
+        const key = util_1.crypto.createPublicKey(publicKey);
+        return util_1.crypto.verifyBlob(this.tbsCertificate.raw, key, this.signatureValue, this.signatureAlgorithm);
+    }
+    validForDate(date) {
+        return this.notBefore <= date && date <= this.notAfter;
+    }
+    equals(other) {
+        return this.root.raw.equals(other.root.raw);
+    }
+    verifySCTs(issuer, logs) {
+        let extSCT;
+        // Verifying the SCT requires that we remove the SCT extension and
+        // re-encode the TBS structure to DER -- this value is part of the data
+        // over which the signature is calculated. Since this is a destructive action
+        // we create a copy of the certificate so we can remove the SCT extension
+        // without affecting the original certificate.
+        const clone = this.clone();
+        // Intentionally not using the findExtension method here because we want to
+        // remove the the SCT extension from the certificate before calculating the
+        // PreCert structure
+        for (let i = 0; i < clone.extensions.length; i++) {
+            const ext = clone.extensions[i];
+            if (ext.subs[0].toOID() === EXTENSION_OID_SCT) {
+                extSCT = new ext_1.x509SCTExtension(ext);
+                // Remove the extension from the certificate
+                clone.extensions.splice(i, 1);
+                break;
+            }
+        }
+        if (!extSCT) {
+            throw new Error('Certificate does not contain SCT extension');
+        }
+        if (extSCT?.signedCertificateTimestamps?.length === 0) {
+            throw new Error('Certificate does not contain any SCTs');
+        }
+        // Construct the PreCert structure
+        // https://www.rfc-editor.org/rfc/rfc6962#section-3.2
+        const preCert = new stream_1.ByteStream();
+        // Calculate hash of the issuer's public key
+        const issuerId = util_1.crypto.hash(issuer.publicKey);
+        preCert.appendView(issuerId);
+        // Re-encodes the certificate to DER after removing the SCT extension
+        const tbs = clone.tbsCertificate.toDER();
+        preCert.appendUint24(tbs.length);
+        preCert.appendView(tbs);
+        // Calculate and return the verification results for each SCT
+        return extSCT.signedCertificateTimestamps.map((sct) => ({
+            logID: sct.logID,
+            verified: sct.verify(preCert.buffer, logs),
+        }));
+    }
+    // Creates a copy of the certificate with a new buffer
+    clone() {
+        const clone = Buffer.alloc(this.root.raw.length);
+        this.root.raw.copy(clone);
+        return x509Certificate.parse(clone);
+    }
+    findExtension(oid) {
+        // Find the extension with the given OID. The OID will always be the first
+        // element of the extension sequence
+        return this.extensions.find((ext) => ext.subs[0].toOID() === oid);
+    }
+    // A certificate should be considered invalid if it contains critical
+    // extensions that are not recognized
+    checkRecognizedExtensions() {
+        // The extension list is the first (and only) element of the extensions
+        // context specific tag
+        const extSeq = this.extensionsObj?.subs[0];
+        const exts = extSeq?.subs.map((ext) => new ext_1.x509Extension(ext));
+        // Check for unrecognized critical extensions
+        return (!exts ||
+            exts.every((ext) => !ext.critical || RECOGNIZED_EXTENSIONS.includes(ext.oid)));
+    }
+    /////////////////////////////////////////////////////////////////////////////
+    // The following properties use the documented x509 structure to locate the
+    // desired ASN.1 object
+    // https://www.rfc-editor.org/rfc/rfc5280#section-4.1
+    // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.1.1
+    get tbsCertificateObj() {
+        // tbsCertificate is the first element of the certificate sequence
+        return this.root.subs[0];
+    }
+    // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.1.2
+    get signatureAlgorithmObj() {
+        // signatureAlgorithm is the second element of the certificate sequence
+        return this.root.subs[1];
+    }
+    // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.1.3
+    get signatureValueObj() {
+        // signatureValue is the third element of the certificate sequence
+        return this.root.subs[2];
+    }
+    // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.1
+    get versionObj() {
+        // version is the first element of the tbsCertificate sequence
+        return this.tbsCertificateObj.subs[0];
+    }
+    // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.4
+    get issuerObj() {
+        // issuer is the fourth element of the tbsCertificate sequence
+        return this.tbsCertificateObj.subs[3];
+    }
+    // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.5
+    get validityObj() {
+        // version is the fifth element of the tbsCertificate sequence
+        return this.tbsCertificateObj.subs[4];
+    }
+    // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.6
+    get subjectObj() {
+        // subject is the sixth element of the tbsCertificate sequence
+        return this.tbsCertificateObj.subs[5];
+    }
+    // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.7
+    get subjectPublicKeyInfoObj() {
+        // subjectPublicKeyInfo is the seventh element of the tbsCertificate sequence
+        return this.tbsCertificateObj.subs[6];
+    }
+    // Extensions can't be located by index because their position varies. Instead,
+    // we need to find the extensions context specific tag
+    // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.9
+    get extensionsObj() {
+        return this.tbsCertificateObj.subs.find((sub) => sub.tag.isContextSpecific(0x03));
+    }
+}
+exports.x509Certificate = x509Certificate;
diff --git a/node_modules/sigstore/dist/x509/ext.d.ts b/node_modules/sigstore/dist/x509/ext.d.ts
new file mode 100644
index 0000000000000..04e54606049fb
--- /dev/null
+++ b/node_modules/sigstore/dist/x509/ext.d.ts
@@ -0,0 +1,41 @@
+/// <reference types="node" />
+import { ASN1Obj } from './asn1/obj';
+import { SignedCertificateTimestamp } from './sct';
+export declare class x509Extension {
+    protected root: ASN1Obj;
+    constructor(asn1: ASN1Obj);
+    get oid(): string;
+    get critical(): boolean;
+    get value(): Buffer;
+    protected get extnValueObj(): ASN1Obj;
+}
+export declare class x509BasicConstraintsExtension extends x509Extension {
+    get isCA(): boolean;
+    get pathLenConstraint(): bigint | undefined;
+    private get sequence();
+}
+export declare class x509KeyUsageExtension extends x509Extension {
+    get digitalSignature(): boolean;
+    get keyCertSign(): boolean;
+    get crlSign(): boolean;
+    private get bitString();
+}
+export declare class x509SubjectAlternativeNameExtension extends x509Extension {
+    get rfc822Name(): string | undefined;
+    get uri(): string | undefined;
+    otherName(oid: string): string | undefined;
+    private findGeneralName;
+    private get generalNames();
+}
+export declare class x509AuthorityKeyIDExtension extends x509Extension {
+    get keyIdentifier(): Buffer | undefined;
+    private findSequenceMember;
+    private get sequence();
+}
+export declare class x509SubjectKeyIDExtension extends x509Extension {
+    get keyIdentifier(): Buffer;
+}
+export declare class x509SCTExtension extends x509Extension {
+    constructor(asn1: ASN1Obj);
+    get signedCertificateTimestamps(): SignedCertificateTimestamp[];
+}
diff --git a/node_modules/sigstore/dist/x509/ext.js b/node_modules/sigstore/dist/x509/ext.js
new file mode 100644
index 0000000000000..caed59247ea4b
--- /dev/null
+++ b/node_modules/sigstore/dist/x509/ext.js
@@ -0,0 +1,157 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.x509SCTExtension = exports.x509SubjectKeyIDExtension = exports.x509AuthorityKeyIDExtension = exports.x509SubjectAlternativeNameExtension = exports.x509KeyUsageExtension = exports.x509BasicConstraintsExtension = exports.x509Extension = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const stream_1 = require("../util/stream");
+const sct_1 = require("./sct");
+// https://www.rfc-editor.org/rfc/rfc5280#section-4.1
+class x509Extension {
+    constructor(asn1) {
+        this.root = asn1;
+    }
+    get oid() {
+        return this.root.subs[0].toOID();
+    }
+    get critical() {
+        // The critical field is optional and will be the second element of the
+        // extension sequence if present. Default to false if not present.
+        return this.root.subs.length === 3 ? this.root.subs[1].toBoolean() : false;
+    }
+    get value() {
+        return this.extnValueObj.value;
+    }
+    get extnValueObj() {
+        // The extnValue field will be the last element of the extension sequence
+        return this.root.subs[this.root.subs.length - 1];
+    }
+}
+exports.x509Extension = x509Extension;
+// https://www.rfc-editor.org/rfc/rfc5280#section-4.2.1.9
+class x509BasicConstraintsExtension extends x509Extension {
+    get isCA() {
+        return this.sequence.subs[0].toBoolean();
+    }
+    get pathLenConstraint() {
+        return this.sequence.subs.length > 1
+            ? this.sequence.subs[1].toInteger()
+            : undefined;
+    }
+    // The extnValue field contains a single sequence wrapping the isCA and
+    // pathLenConstraint.
+    get sequence() {
+        return this.extnValueObj.subs[0];
+    }
+}
+exports.x509BasicConstraintsExtension = x509BasicConstraintsExtension;
+// https://www.rfc-editor.org/rfc/rfc5280#section-4.2.1.3
+class x509KeyUsageExtension extends x509Extension {
+    get digitalSignature() {
+        return this.bitString[0] === 1;
+    }
+    get keyCertSign() {
+        return this.bitString[5] === 1;
+    }
+    get crlSign() {
+        return this.bitString[6] === 1;
+    }
+    // The extnValue field contains a single bit string which is a bit mask
+    // indicating which key usages are enabled.
+    get bitString() {
+        return this.extnValueObj.subs[0].toBitString();
+    }
+}
+exports.x509KeyUsageExtension = x509KeyUsageExtension;
+// https://www.rfc-editor.org/rfc/rfc5280#section-4.2.1.6
+class x509SubjectAlternativeNameExtension extends x509Extension {
+    get rfc822Name() {
+        return this.findGeneralName(0x01)?.value.toString('ascii');
+    }
+    get uri() {
+        return this.findGeneralName(0x06)?.value.toString('ascii');
+    }
+    // Retrieve the value of an otherName with the given OID.
+    otherName(oid) {
+        const otherName = this.findGeneralName(0x00);
+        if (otherName === undefined) {
+            return undefined;
+        }
+        // The otherName is a sequence containing an OID and a value.
+        // Need to check that the OID matches the one we're looking for.
+        const otherNameOID = otherName.subs[0].toOID();
+        if (otherNameOID !== oid) {
+            return undefined;
+        }
+        // The otherNameValue is a sequence containing the actual value.
+        const otherNameValue = otherName.subs[1];
+        return otherNameValue.subs[0].value.toString('ascii');
+    }
+    findGeneralName(tag) {
+        return this.generalNames.find((gn) => gn.tag.isContextSpecific(tag));
+    }
+    // The extnValue field contains a sequence of GeneralNames.
+    get generalNames() {
+        return this.extnValueObj.subs[0].subs;
+    }
+}
+exports.x509SubjectAlternativeNameExtension = x509SubjectAlternativeNameExtension;
+// https://www.rfc-editor.org/rfc/rfc5280#section-4.2.1.1
+class x509AuthorityKeyIDExtension extends x509Extension {
+    get keyIdentifier() {
+        return this.findSequenceMember(0x00)?.value;
+    }
+    findSequenceMember(tag) {
+        return this.sequence.subs.find((el) => el.tag.isContextSpecific(tag));
+    }
+    // The extnValue field contains a single sequence wrapping the keyIdentifier
+    get sequence() {
+        return this.extnValueObj.subs[0];
+    }
+}
+exports.x509AuthorityKeyIDExtension = x509AuthorityKeyIDExtension;
+// https://www.rfc-editor.org/rfc/rfc5280#section-4.2.1.2
+class x509SubjectKeyIDExtension extends x509Extension {
+    get keyIdentifier() {
+        return this.extnValueObj.subs[0].value;
+    }
+}
+exports.x509SubjectKeyIDExtension = x509SubjectKeyIDExtension;
+// https://www.rfc-editor.org/rfc/rfc6962#section-3.3
+class x509SCTExtension extends x509Extension {
+    constructor(asn1) {
+        super(asn1);
+    }
+    get signedCertificateTimestamps() {
+        const buf = this.extnValueObj.subs[0].value;
+        const stream = new stream_1.ByteStream(buf);
+        // The overall list length is encoded in the first two bytes -- note this
+        // is the length of the list in bytes, NOT the number of SCTs in the list
+        const end = stream.getUint16() + 2;
+        const sctList = [];
+        while (stream.position < end) {
+            // Read the length of the next SCT
+            const sctLength = stream.getUint16();
+            // Slice out the bytes for the next SCT and parse it
+            const sct = stream.getBlock(sctLength);
+            sctList.push(sct_1.SignedCertificateTimestamp.parse(sct));
+        }
+        if (stream.position !== end) {
+            throw new Error('SCT list length does not match actual length');
+        }
+        return sctList;
+    }
+}
+exports.x509SCTExtension = x509SCTExtension;
diff --git a/node_modules/sigstore/dist/x509/sct.d.ts b/node_modules/sigstore/dist/x509/sct.d.ts
new file mode 100644
index 0000000000000..076a532984c6b
--- /dev/null
+++ b/node_modules/sigstore/dist/x509/sct.d.ts
@@ -0,0 +1,26 @@
+/// <reference types="node" />
+import * as sigstore from '../types/sigstore';
+interface SCTOptions {
+    version: number;
+    logID: Buffer;
+    timestamp: Buffer;
+    extensions: Buffer;
+    hashAlgorithm: number;
+    signatureAlgorithm: number;
+    signature: Buffer;
+}
+export declare class SignedCertificateTimestamp {
+    readonly version: number;
+    readonly logID: Buffer;
+    readonly timestamp: Buffer;
+    readonly extensions: Buffer;
+    readonly hashAlgorithm: number;
+    readonly signatureAlgorithm: number;
+    readonly signature: Buffer;
+    constructor(options: SCTOptions);
+    get datetime(): Date;
+    get algorithm(): string;
+    verify(preCert: Buffer, logs: sigstore.TransparencyLogInstance[]): boolean;
+    static parse(buf: Buffer): SignedCertificateTimestamp;
+}
+export {};
diff --git a/node_modules/sigstore/dist/x509/sct.js b/node_modules/sigstore/dist/x509/sct.js
new file mode 100644
index 0000000000000..72528dd3a2077
--- /dev/null
+++ b/node_modules/sigstore/dist/x509/sct.js
@@ -0,0 +1,101 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.SignedCertificateTimestamp = void 0;
+const util_1 = require("../util");
+const stream_1 = require("../util/stream");
+class SignedCertificateTimestamp {
+    constructor(options) {
+        this.version = options.version;
+        this.logID = options.logID;
+        this.timestamp = options.timestamp;
+        this.extensions = options.extensions;
+        this.hashAlgorithm = options.hashAlgorithm;
+        this.signatureAlgorithm = options.signatureAlgorithm;
+        this.signature = options.signature;
+    }
+    get datetime() {
+        return new Date(Number(this.timestamp.readBigInt64BE()));
+    }
+    // Returns the hash algorithm used to generate the SCT's signature.
+    // https://www.rfc-editor.org/rfc/rfc5246#section-7.4.1.4.1
+    get algorithm() {
+        switch (this.hashAlgorithm) {
+            case 0:
+                return 'none';
+            case 1:
+                return 'md5';
+            case 2:
+                return 'sha1';
+            case 3:
+                return 'sha224';
+            case 4:
+                return 'sha256';
+            case 5:
+                return 'sha384';
+            case 6:
+                return 'sha512';
+            default:
+                return 'unknown';
+        }
+    }
+    verify(preCert, logs) {
+        // Find key for the log reponsible for this signature
+        const log = logs.find((log) => log.logId?.keyId.equals(this.logID));
+        if (!log?.publicKey?.rawBytes) {
+            throw new Error(`No key found for log: ${this.logID.toString('base64')}`);
+        }
+        const publicKey = util_1.crypto.createPublicKey(log.publicKey.rawBytes);
+        // Assemble the digitally-signed struct (the data over which the signature
+        // was generated).
+        // https://www.rfc-editor.org/rfc/rfc6962#section-3.2
+        const stream = new stream_1.ByteStream();
+        stream.appendChar(this.version);
+        stream.appendChar(0x00); // SignatureType = certificate_timestamp(0)
+        stream.appendView(this.timestamp);
+        stream.appendUint16(0x01); // LogEntryType = precert_entry(1)
+        stream.appendView(preCert);
+        stream.appendUint16(this.extensions.byteLength);
+        if (this.extensions.byteLength > 0) {
+            stream.appendView(this.extensions);
+        }
+        return util_1.crypto.verifyBlob(stream.buffer, publicKey, this.signature, this.algorithm);
+    }
+    // Parses a SignedCertificateTimestamp from a buffer. SCTs are encoded using
+    // TLS encoding which means the fields and lengths of most fields are
+    // specified as part of the SCT and TLS specs.
+    // https://www.rfc-editor.org/rfc/rfc6962#section-3.2
+    // https://www.rfc-editor.org/rfc/rfc5246#section-7.4.1.4.1
+    static parse(buf) {
+        const stream = new stream_1.ByteStream(buf);
+        // Version - enum { v1(0), (255) }
+        const version = stream.getUint8();
+        // Log ID  - struct { opaque key_id[32]; }
+        const logID = stream.getBlock(32);
+        // Timestamp - uint64
+        const timestamp = stream.getBlock(8);
+        // Extensions - opaque extensions<0..2^16-1>;
+        const extenstionLength = stream.getUint16();
+        const extensions = stream.getBlock(extenstionLength);
+        // Hash algo - enum { sha256(4), . . . (255) }
+        const hashAlgorithm = stream.getUint8();
+        // Signature algo - enum { anonymous(0), rsa(1), dsa(2), ecdsa(3), (255) }
+        const signatureAlgorithm = stream.getUint8();
+        // Signature  - opaque signature<0..2^16-1>;
+        const sigLength = stream.getUint16();
+        const signature = stream.getBlock(sigLength);
+        // Check that we read the entire buffer
+        if (stream.position !== buf.length) {
+            throw new Error('SCT buffer length mismatch');
+        }
+        return new SignedCertificateTimestamp({
+            version,
+            logID,
+            timestamp,
+            extensions,
+            hashAlgorithm,
+            signatureAlgorithm,
+            signature,
+        });
+    }
+}
+exports.SignedCertificateTimestamp = SignedCertificateTimestamp;
diff --git a/node_modules/sigstore/dist/x509/verify.d.ts b/node_modules/sigstore/dist/x509/verify.d.ts
new file mode 100644
index 0000000000000..04c324dca01c0
--- /dev/null
+++ b/node_modules/sigstore/dist/x509/verify.d.ts
@@ -0,0 +1,8 @@
+import { x509Certificate } from './cert';
+interface VerifyCertificateChainOptions {
+    trustedCerts: x509Certificate[];
+    certs: x509Certificate[];
+    validAt?: Date;
+}
+export declare function verifyCertificateChain(opts: VerifyCertificateChainOptions): x509Certificate[];
+export {};
diff --git a/node_modules/sigstore/dist/x509/verify.js b/node_modules/sigstore/dist/x509/verify.js
new file mode 100644
index 0000000000000..cc34a9ea23abe
--- /dev/null
+++ b/node_modules/sigstore/dist/x509/verify.js
@@ -0,0 +1,159 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.verifyCertificateChain = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const error_1 = require("../error");
+function verifyCertificateChain(opts) {
+    const verifier = new CertificateChainVerifier(opts);
+    return verifier.verify();
+}
+exports.verifyCertificateChain = verifyCertificateChain;
+class CertificateChainVerifier {
+    constructor(opts) {
+        this.certs = opts.certs;
+        this.trustedCerts = opts.trustedCerts;
+        this.localCerts = dedupeCertificates([...opts.trustedCerts, ...opts.certs]);
+        this.validAt = opts.validAt || new Date();
+    }
+    verify() {
+        if (this.certs.length === 0) {
+            throw new error_1.VerificationError('No certificates provided');
+        }
+        // Construct certificate path from leaf to root
+        const certificatePath = this.sort();
+        // Perform validation checks on each certificate in the path
+        this.checkPath(certificatePath);
+        // Return verified certificate path
+        return certificatePath;
+    }
+    sort() {
+        const leafCert = this.localCerts[this.localCerts.length - 1];
+        // Construct all possible paths from the leaf
+        let paths = this.buildPaths(leafCert);
+        // Filter for paths which contain a trusted certificate
+        paths = paths.filter((path) => path.some((cert) => this.trustedCerts.includes(cert)));
+        if (paths.length === 0) {
+            throw new error_1.VerificationError('No trusted certificate path found');
+        }
+        // Find the shortest of possible paths
+        const path = paths.reduce((prev, curr) => prev.length < curr.length ? prev : curr);
+        // Construct chain from shortest path
+        return [leafCert, ...path];
+    }
+    // Recursively build all possible paths from the leaf to the root
+    buildPaths(certificate) {
+        const paths = [];
+        const issuers = this.findIssuer(certificate);
+        if (issuers.length === 0) {
+            throw new error_1.VerificationError('No valid certificate path found');
+        }
+        for (let i = 0; i < issuers.length; i++) {
+            const issuer = issuers[i];
+            // Base case - issuer is self
+            if (issuer.equals(certificate)) {
+                paths.push([certificate]);
+                continue;
+            }
+            // Recursively build path for the issuer
+            const subPaths = this.buildPaths(issuer);
+            // Construct paths by appending the issuer to each subpath
+            for (let j = 0; j < subPaths.length; j++) {
+                paths.push([issuer, ...subPaths[j]]);
+            }
+        }
+        return paths;
+    }
+    // Return all possible issuers for the given certificate
+    findIssuer(certificate) {
+        let issuers = [];
+        let keyIdentifier;
+        // Exit early if the certificate is self-signed
+        if (certificate.subject.equals(certificate.issuer)) {
+            if (certificate.verify()) {
+                return [certificate];
+            }
+        }
+        // If the certificate has an authority key identifier, use that
+        // to find the issuer
+        if (certificate.extAuthorityKeyID) {
+            keyIdentifier = certificate.extAuthorityKeyID.keyIdentifier;
+            // TODO: Add support for authorityCertIssuer/authorityCertSerialNumber
+            // though Fulcio doesn't appear to use these
+        }
+        // Find possible issuers by comparing the authorityKeyID/subjectKeyID
+        // or issuer/subject. Potential issuers are added to the result array.
+        this.localCerts.forEach((possibleIssuer) => {
+            if (keyIdentifier) {
+                if (possibleIssuer.extSubjectKeyID) {
+                    if (possibleIssuer.extSubjectKeyID.keyIdentifier.equals(keyIdentifier)) {
+                        issuers.push(possibleIssuer);
+                    }
+                    return;
+                }
+            }
+            // Fallback to comparing certificate issuer and subject if
+            // subjectKey/authorityKey extensions are not present
+            if (possibleIssuer.subject.equals(certificate.issuer)) {
+                issuers.push(possibleIssuer);
+            }
+        });
+        // Remove any issuers which fail to verify the certificate
+        issuers = issuers.filter((issuer) => {
+            try {
+                return certificate.verify(issuer);
+            }
+            catch (ex) {
+                return false;
+            }
+        });
+        return issuers;
+    }
+    checkPath(path) {
+        if (path.length < 2) {
+            throw new error_1.VerificationError('Certificate chain must contain at least two certificates');
+        }
+        // Check that all certificates are valid at the check date
+        const validForDate = path.every((cert) => cert.validForDate(this.validAt));
+        if (!validForDate) {
+            throw new error_1.VerificationError('Certificate is not valid or expired at the specified date');
+        }
+        // Ensure that all certificates beyond the leaf are CAs
+        const validCAs = path.slice(1).every((cert) => cert.isCA);
+        if (!validCAs) {
+            throw new error_1.VerificationError('Intermediate certificate is not a CA');
+        }
+        // Certificate's issuer must match the subject of the next certificate
+        // in the chain
+        for (let i = path.length - 2; i >= 0; i--) {
+            if (!path[i].issuer.equals(path[i + 1].subject)) {
+                throw new error_1.VerificationError('Incorrect certificate name chaining');
+            }
+        }
+    }
+}
+// Remove duplicate certificates from the array
+function dedupeCertificates(certs) {
+    for (let i = 0; i < certs.length; i++) {
+        for (let j = i + 1; j < certs.length; j++) {
+            if (certs[i].equals(certs[j])) {
+                certs.splice(j, 1);
+                j--;
+            }
+        }
+    }
+    return certs;
+}
diff --git a/node_modules/sigstore/package.json b/node_modules/sigstore/package.json
new file mode 100644
index 0000000000000..1a5960822eb0e
--- /dev/null
+++ b/node_modules/sigstore/package.json
@@ -0,0 +1,59 @@
+{
+  "name": "sigstore",
+  "version": "1.0.0",
+  "description": "code-signing for npm packages",
+  "main": "dist/index.js",
+  "types": "dist/index.d.ts",
+  "scripts": {
+    "build": "tsc",
+    "test": "jest",
+    "test:watch": "jest --watch",
+    "test:ci": "jest --maxWorkers=2 --coverage",
+    "lint": "eslint --fix --ext .ts src/**",
+    "lint:check": "eslint --max-warnings 0 --ext .ts src/**",
+    "format": "prettier --write \"src/**/*\"",
+    "codegen:sigstore": "./hack/generate-sigstore-types",
+    "codegen:rekor": "./hack/generate-rekor-types"
+  },
+  "bin": {
+    "sigstore": "bin/sigstore.js"
+  },
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/sigstore/sigstore-js.git"
+  },
+  "files": [
+    "dist",
+    "store"
+  ],
+  "author": "bdehamer@github.com",
+  "license": "Apache-2.0",
+  "bugs": {
+    "url": "https://github.com/sigstore/sigstore-js/issues"
+  },
+  "homepage": "https://github.com/sigstore/sigstore-js#readme",
+  "devDependencies": {
+    "@tsconfig/node14": "^1.0.3",
+    "@types/jest": "^29.4.0",
+    "@types/make-fetch-happen": "^10.0.0",
+    "@types/node": "^18.6.5",
+    "@typescript-eslint/eslint-plugin": "^5.26.0",
+    "@typescript-eslint/parser": "^5.26.0",
+    "eslint": "^8.16.0",
+    "eslint-config-prettier": "^8.5.0",
+    "eslint-plugin-prettier": "^4.0.0",
+    "jest": "^29.4.1",
+    "json-schema-to-typescript": "^11.0.2",
+    "nock": "^13.2.4",
+    "prettier": "^2.6.2",
+    "ts-jest": "^29.0.5",
+    "typescript": "^4.7.2"
+  },
+  "dependencies": {
+    "make-fetch-happen": "^11.0.1",
+    "tuf-js": "^1.0.0"
+  },
+  "engines": {
+    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+  }
+}
diff --git a/node_modules/sigstore/store/map.json b/node_modules/sigstore/store/map.json
new file mode 100644
index 0000000000000..620bf0bedbf44
--- /dev/null
+++ b/node_modules/sigstore/store/map.json
@@ -0,0 +1,19 @@
+{
+  "repositories": {
+    "public-good-instance": [
+      "https://sigstore-tuf-root.storage.googleapis.com"
+    ]
+  },
+  "mapping": [
+    {
+      "paths": [
+        "*"
+      ],
+      "repositories": [
+        "public-good-instance"
+      ],
+      "terminating": true,
+      "threshold": 1
+    }
+  ]
+}
diff --git a/node_modules/sigstore/store/public-good-instance-root.json b/node_modules/sigstore/store/public-good-instance-root.json
new file mode 100644
index 0000000000000..38f80f940473a
--- /dev/null
+++ b/node_modules/sigstore/store/public-good-instance-root.json
@@ -0,0 +1,156 @@
+{
+	"signed": {
+		"_type": "root",
+		"spec_version": "1.0",
+		"version": 5,
+		"expires": "2023-04-18T18:13:43Z",
+		"keys": {
+			"25a0eb450fd3ee2bd79218c963dce3f1cc6118badf251bf149f0bd07d5cabe99": {
+				"keytype": "ecdsa-sha2-nistp256",
+				"scheme": "ecdsa-sha2-nistp256",
+				"keyid_hash_algorithms": [
+					"sha256",
+					"sha512"
+				],
+				"keyval": {
+					"public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEEXsz3SZXFb8jMV42j6pJlyjbjR8K\nN3Bwocexq6LMIb5qsWKOQvLN16NUefLc4HswOoumRsVVaajSpQS6fobkRw==\n-----END PUBLIC KEY-----\n"
+				}
+			},
+			"2e61cd0cbf4a8f45809bda9f7f78c0d33ad11842ff94ae340873e2664dc843de": {
+				"keytype": "ecdsa-sha2-nistp256",
+				"scheme": "ecdsa-sha2-nistp256",
+				"keyid_hash_algorithms": [
+					"sha256",
+					"sha512"
+				],
+				"keyval": {
+					"public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE0ghrh92Lw1Yr3idGV5WqCtMDB8Cx\n+D8hdC4w2ZLNIplVRoVGLskYa3gheMyOjiJ8kPi15aQ2//7P+oj7UvJPGw==\n-----END PUBLIC KEY-----\n"
+				}
+			},
+			"45b283825eb184cabd582eb17b74fc8ed404f68cf452acabdad2ed6f90ce216b": {
+				"keytype": "ecdsa-sha2-nistp256",
+				"scheme": "ecdsa-sha2-nistp256",
+				"keyid_hash_algorithms": [
+					"sha256",
+					"sha512"
+				],
+				"keyval": {
+					"public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAELrWvNt94v4R085ELeeCMxHp7PldF\n0/T1GxukUh2ODuggLGJE0pc1e8CSBf6CS91Fwo9FUOuRsjBUld+VqSyCdQ==\n-----END PUBLIC KEY-----\n"
+				}
+			},
+			"7f7513b25429a64473e10ce3ad2f3da372bbdd14b65d07bbaf547e7c8bbbe62b": {
+				"keytype": "ecdsa-sha2-nistp256",
+				"scheme": "ecdsa-sha2-nistp256",
+				"keyid_hash_algorithms": [
+					"sha256",
+					"sha512"
+				],
+				"keyval": {
+					"public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEinikSsAQmYkNeH5eYq/CnIzLaacO\nxlSaawQDOwqKy/tCqxq5xxPSJc21K4WIhs9GyOkKfzueY3GILzcMJZ4cWw==\n-----END PUBLIC KEY-----\n"
+				}
+			},
+			"e1863ba02070322ebc626dcecf9d881a3a38c35c3b41a83765b6ad6c37eaec2a": {
+				"keytype": "ecdsa-sha2-nistp256",
+				"scheme": "ecdsa-sha2-nistp256",
+				"keyid_hash_algorithms": [
+					"sha256",
+					"sha512"
+				],
+				"keyval": {
+					"public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEWRiGr5+j+3J5SsH+Ztr5nE2H2wO7\nBV+nO3s93gLca18qTOzHY1oWyAGDykMSsGTUBSt9D+An0KfKsD2mfSM42Q==\n-----END PUBLIC KEY-----\n"
+				}
+			},
+			"f5312f542c21273d9485a49394386c4575804770667f2ddb59b3bf0669fddd2f": {
+				"keytype": "ecdsa-sha2-nistp256",
+				"scheme": "ecdsa-sha2-nistp256",
+				"keyid_hash_algorithms": [
+					"sha256",
+					"sha512"
+				],
+				"keyval": {
+					"public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEzBzVOmHCPojMVLSI364WiiV8NPrD\n6IgRxVliskz/v+y3JER5mcVGcONliDcWMC5J2lfHmjPNPhb4H7xm8LzfSA==\n-----END PUBLIC KEY-----\n"
+				}
+			},
+			"ff51e17fcf253119b7033f6f57512631da4a0969442afcf9fc8b141c7f2be99c": {
+				"keytype": "ecdsa-sha2-nistp256",
+				"scheme": "ecdsa-sha2-nistp256",
+				"keyid_hash_algorithms": [
+					"sha256",
+					"sha512"
+				],
+				"keyval": {
+					"public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEy8XKsmhBYDI8Jc0GwzBxeKax0cm5\nSTKEU65HPFunUn41sT8pi0FjM4IkHz/YUmwmLUO0Wt7lxhj6BkLIK4qYAw==\n-----END PUBLIC KEY-----\n"
+				}
+			}
+		},
+		"roles": {
+			"root": {
+				"keyids": [
+					"ff51e17fcf253119b7033f6f57512631da4a0969442afcf9fc8b141c7f2be99c",
+					"25a0eb450fd3ee2bd79218c963dce3f1cc6118badf251bf149f0bd07d5cabe99",
+					"f5312f542c21273d9485a49394386c4575804770667f2ddb59b3bf0669fddd2f",
+					"7f7513b25429a64473e10ce3ad2f3da372bbdd14b65d07bbaf547e7c8bbbe62b",
+					"2e61cd0cbf4a8f45809bda9f7f78c0d33ad11842ff94ae340873e2664dc843de"
+				],
+				"threshold": 3
+			},
+			"snapshot": {
+				"keyids": [
+					"45b283825eb184cabd582eb17b74fc8ed404f68cf452acabdad2ed6f90ce216b"
+				],
+				"threshold": 1
+			},
+			"targets": {
+				"keyids": [
+					"ff51e17fcf253119b7033f6f57512631da4a0969442afcf9fc8b141c7f2be99c",
+					"25a0eb450fd3ee2bd79218c963dce3f1cc6118badf251bf149f0bd07d5cabe99",
+					"f5312f542c21273d9485a49394386c4575804770667f2ddb59b3bf0669fddd2f",
+					"7f7513b25429a64473e10ce3ad2f3da372bbdd14b65d07bbaf547e7c8bbbe62b",
+					"2e61cd0cbf4a8f45809bda9f7f78c0d33ad11842ff94ae340873e2664dc843de"
+				],
+				"threshold": 3
+			},
+			"timestamp": {
+				"keyids": [
+					"e1863ba02070322ebc626dcecf9d881a3a38c35c3b41a83765b6ad6c37eaec2a"
+				],
+				"threshold": 1
+			}
+		},
+		"consistent_snapshot": true
+	},
+	"signatures": [
+		{
+			"keyid": "ff51e17fcf253119b7033f6f57512631da4a0969442afcf9fc8b141c7f2be99c",
+			"sig": "3045022100fc1c2be509ce50ea917bbad1d9efe9d96c8c2ebea04af2717aa3d9c6fe617a75022012eef282a19f2d8bd4818aa333ef48a06489f49d4d34a20b8fe8fc867bb25a7a"
+		},
+		{
+			"keyid": "25a0eb450fd3ee2bd79218c963dce3f1cc6118badf251bf149f0bd07d5cabe99",
+			"sig": "30450221008a4392ae5057fc00778b651e61fea244766a4ae58db84d9f1d3810720ab0f3b702207c49e59e8031318caf02252ecea1281cecc1e5986c309a9cef61f455ecf7165d"
+		},
+		{
+			"keyid": "7f7513b25429a64473e10ce3ad2f3da372bbdd14b65d07bbaf547e7c8bbbe62b",
+			"sig": "3046022100da1b8dc5d53aaffbbfac98de3e23ee2d2ad3446a7bed09fac0f88bae19be2587022100b681c046afc3919097dfe794e0d819be891e2e850aade315bec06b0c4dea221b"
+		},
+		{
+			"keyid": "2e61cd0cbf4a8f45809bda9f7f78c0d33ad11842ff94ae340873e2664dc843de",
+			"sig": "3046022100b534e0030e1b271133ecfbdf3ba9fbf3becb3689abea079a2150afbb63cdb7c70221008c39a718fd9495f249b4ab8788d5b9dc269f0868dbe38b272f48207359d3ded9"
+		},
+		{
+			"keyid": "2f64fb5eac0cf94dd39bb45308b98920055e9a0d8e012a7220787834c60aef97",
+			"sig": "3045022100fc1c2be509ce50ea917bbad1d9efe9d96c8c2ebea04af2717aa3d9c6fe617a75022012eef282a19f2d8bd4818aa333ef48a06489f49d4d34a20b8fe8fc867bb25a7a"
+		},
+		{
+			"keyid": "eaf22372f417dd618a46f6c627dbc276e9fd30a004fc94f9be946e73f8bd090b",
+			"sig": "30450221008a4392ae5057fc00778b651e61fea244766a4ae58db84d9f1d3810720ab0f3b702207c49e59e8031318caf02252ecea1281cecc1e5986c309a9cef61f455ecf7165d"
+		},
+		{
+			"keyid": "f505595165a177a41750a8e864ed1719b1edfccd5a426fd2c0ffda33ce7ff209",
+			"sig": "3046022100da1b8dc5d53aaffbbfac98de3e23ee2d2ad3446a7bed09fac0f88bae19be2587022100b681c046afc3919097dfe794e0d819be891e2e850aade315bec06b0c4dea221b"
+		},
+		{
+			"keyid": "75e867ab10e121fdef32094af634707f43ddd79c6bab8ad6c5ab9f03f4ea8c90",
+			"sig": "3046022100b534e0030e1b271133ecfbdf3ba9fbf3becb3689abea079a2150afbb63cdb7c70221008c39a718fd9495f249b4ab8788d5b9dc269f0868dbe38b272f48207359d3ded9"
+		}
+	]
+}
\ No newline at end of file
diff --git a/node_modules/tuf-js/LICENSE b/node_modules/tuf-js/LICENSE
new file mode 100644
index 0000000000000..f28ab0914a319
--- /dev/null
+++ b/node_modules/tuf-js/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright GitHub
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/node_modules/tuf-js/dist/error.d.ts b/node_modules/tuf-js/dist/error.d.ts
new file mode 100644
index 0000000000000..130e49ab9dfd2
--- /dev/null
+++ b/node_modules/tuf-js/dist/error.d.ts
@@ -0,0 +1,30 @@
/** An error about insufficient or malformed values. */
export declare class ValueError extends Error {
}
/** Runtime failure; carries no extra members beyond Error. */
export declare class RuntimeError extends Error {
}
/** Failure to persist data; carries no extra members beyond Error. */
export declare class PersistError extends Error {
}
/**
 * An error with a repository's state, such as a missing file.
 * Covers all exceptions that come from the repository side when looking
 * from the perspective of users of the metadata API or ngclient.
 */
export declare class RepositoryError extends Error {
}
/** Metadata object with an insufficient threshold of signatures. */
export declare class UnsignedMetadataError extends RepositoryError {
}
/** Metadata that contains an invalid version number. */
export declare class BadVersionError extends RepositoryError {
}
/** Metadata containing a previously verified version number. */
export declare class EqualVersionError extends BadVersionError {
}
/** A TUF metadata file has expired. */
export declare class ExpiredMetadataError extends RepositoryError {
}
/** An error while checking the length and hash values of an object. */
export declare class LengthOrHashMismatchError extends RepositoryError {
}
/** Cryptographic operation failure; carries no extra members beyond Error. */
export declare class CryptoError extends Error {
}
/** A CryptoError raised for an algorithm the library does not support. */
export declare class UnsupportedAlgorithmError extends CryptoError {
}
/** An error occurred while attempting to download a file. */
export declare class DownloadError extends Error {
}
/** A mismatch of lengths was seen while downloading a file. */
export declare class DownloadLengthMismatchError extends DownloadError {
}
/** Returned by FetcherInterface implementations for HTTP errors. */
export declare class DownloadHTTPError extends DownloadError {
    /** HTTP status code of the failed response. */
    statusCode: number;
    constructor(message: string, statusCode: number);
}
diff --git a/node_modules/tuf-js/dist/error.js b/node_modules/tuf-js/dist/error.js
new file mode 100644
index 0000000000000..ce7ca5ea06ceb
--- /dev/null
+++ b/node_modules/tuf-js/dist/error.js
@@ -0,0 +1,62 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.DownloadHTTPError = exports.DownloadLengthMismatchError = exports.DownloadError = exports.UnsupportedAlgorithmError = exports.CryptoError = exports.LengthOrHashMismatchError = exports.ExpiredMetadataError = exports.EqualVersionError = exports.BadVersionError = exports.UnsignedMetadataError = exports.RepositoryError = exports.PersistError = exports.RuntimeError = exports.ValueError = void 0;
// An error about insufficient values
class ValueError extends Error {
}
exports.ValueError = ValueError;
// A runtime failure; adds no members beyond Error.
class RuntimeError extends Error {
}
exports.RuntimeError = RuntimeError;
// A failure to persist data; adds no members beyond Error.
class PersistError extends Error {
}
exports.PersistError = PersistError;
// An error with a repository's state, such as a missing file.
// It covers all exceptions that come from the repository side when
// looking from the perspective of users of metadata API or ngclient.
class RepositoryError extends Error {
}
exports.RepositoryError = RepositoryError;
// An error about metadata object with insufficient threshold of signatures.
class UnsignedMetadataError extends RepositoryError {
}
exports.UnsignedMetadataError = UnsignedMetadataError;
// An error for metadata that contains an invalid version number.
class BadVersionError extends RepositoryError {
}
exports.BadVersionError = BadVersionError;
// An error for metadata containing a previously verified version number.
class EqualVersionError extends BadVersionError {
}
exports.EqualVersionError = EqualVersionError;
// Indicate that a TUF Metadata file has expired.
class ExpiredMetadataError extends RepositoryError {
}
exports.ExpiredMetadataError = ExpiredMetadataError;
// An error while checking the length and hash values of an object.
class LengthOrHashMismatchError extends RepositoryError {
}
exports.LengthOrHashMismatchError = LengthOrHashMismatchError;
// A failure in a cryptographic operation; adds no members beyond Error.
class CryptoError extends Error {
}
exports.CryptoError = CryptoError;
// A CryptoError raised for algorithms the library does not support.
class UnsupportedAlgorithmError extends CryptoError {
}
exports.UnsupportedAlgorithmError = UnsupportedAlgorithmError;
//----- Download Errors -------------------------------------------------------
// An error occurred while attempting to download a file.
class DownloadError extends Error {
}
exports.DownloadError = DownloadError;
// Indicate that a mismatch of lengths was seen while downloading a file
class DownloadLengthMismatchError extends DownloadError {
}
exports.DownloadLengthMismatchError = DownloadLengthMismatchError;
// Returned by FetcherInterface implementations for HTTP errors.
// Preserves the HTTP status code of the failed response on `statusCode`.
class DownloadHTTPError extends DownloadError {
    constructor(message, statusCode) {
        super(message);
        this.statusCode = statusCode;
    }
}
exports.DownloadHTTPError = DownloadHTTPError;
diff --git a/node_modules/tuf-js/dist/fetcher.d.ts b/node_modules/tuf-js/dist/fetcher.d.ts
new file mode 100644
index 0000000000000..2b52cbef52326
--- /dev/null
+++ b/node_modules/tuf-js/dist/fetcher.d.ts
@@ -0,0 +1,19 @@
+/// <reference types="node" />
+/// <reference types="node" />
/** Callback invoked with the path of the downloaded temporary file. */
type DownloadFileHandler<T> = (file: string) => Promise<T>;
/**
 * Base class for fetchers: subclasses supply the transport via `fetch`,
 * while `downloadFile`/`downloadBytes` provide length-checked downloads.
 */
export declare abstract class BaseFetcher {
    /** Returns a readable stream for the resource at `url`. */
    abstract fetch(url: string): Promise<NodeJS.ReadableStream>;
    /** Downloads `url` (at most `maxLength` bytes) to a temp file and passes it to `handler`. */
    downloadFile<T>(url: string, maxLength: number, handler: DownloadFileHandler<T>): Promise<T>;
    /** Downloads `url` (at most `maxLength` bytes) and returns the content as a Buffer. */
    downloadBytes(url: string, maxLength: number): Promise<Buffer>;
}
/** Options forwarded to make-fetch-happen (see fetcher.js). */
interface FetcherOptions {
    timeout?: number;
    retries?: number;
}
/** Default fetcher implementation backed by make-fetch-happen. */
export declare class Fetcher extends BaseFetcher {
    private timeout?;
    private retries?;
    constructor(options?: FetcherOptions);
    fetch(url: string): Promise<NodeJS.ReadableStream>;
}
export {};
diff --git a/node_modules/tuf-js/dist/fetcher.js b/node_modules/tuf-js/dist/fetcher.js
new file mode 100644
index 0000000000000..cb42ab22a1d31
--- /dev/null
+++ b/node_modules/tuf-js/dist/fetcher.js
@@ -0,0 +1,81 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Fetcher = exports.BaseFetcher = void 0;
+const fs_1 = __importDefault(require("fs"));
+const make_fetch_happen_1 = __importDefault(require("make-fetch-happen"));
+const util_1 = __importDefault(require("util"));
+const error_1 = require("./error");
+const tmpfile_1 = require("./utils/tmpfile");
class BaseFetcher {
    // Download file from given URL. The file is downloaded to a temporary
    // location and then passed to the given handler. The handler is responsible
    // for moving the file to its final location. The temporary file is deleted
    // after the handler returns.
    //
    // Throws DownloadLengthMismatchError as soon as more than `maxLength`
    // bytes have been received. The transport is supplied by the subclass's
    // abstract `fetch(url)` (see fetcher.d.ts).
    async downloadFile(url, maxLength, handler) {
        return (0, tmpfile_1.withTempFile)(async (tmpFile) => {
            const reader = await this.fetch(url);
            let numberOfBytesReceived = 0;
            const fileStream = fs_1.default.createWriteStream(tmpFile);
            // Read the stream a chunk at a time so that we can check
            // the length of the file as we go
            try {
                for await (const chunk of reader) {
                    const bufferChunk = Buffer.from(chunk);
                    numberOfBytesReceived += bufferChunk.length;
                    if (numberOfBytesReceived > maxLength) {
                        throw new error_1.DownloadLengthMismatchError('Max length reached');
                    }
                    // Back-pressure: wait for each chunk to be accepted before
                    // reading the next one.
                    await writeBufferToStream(fileStream, bufferChunk);
                }
            }
            finally {
                // Make sure we always close the stream
                await util_1.default.promisify(fileStream.close).bind(fileStream)();
            }
            return handler(tmpFile);
        });
    }
    // Download bytes from given URL.
    // Buffers the entire (length-checked) download in memory by re-reading
    // the temporary file produced by downloadFile.
    async downloadBytes(url, maxLength) {
        return this.downloadFile(url, maxLength, async (file) => {
            const stream = fs_1.default.createReadStream(file);
            const chunks = [];
            for await (const chunk of stream) {
                chunks.push(chunk);
            }
            return Buffer.concat(chunks);
        });
    }
}
exports.BaseFetcher = BaseFetcher;
// Default BaseFetcher implementation backed by make-fetch-happen.
// `timeout` and `retries` are forwarded to each request.
class Fetcher extends BaseFetcher {
    constructor({ timeout, retries } = {}) {
        super();
        this.timeout = timeout;
        this.retries = retries;
    }
    // Fetches `url` and returns the response body stream, or throws a
    // DownloadHTTPError on a non-OK or body-less response.
    async fetch(url) {
        const response = await (0, make_fetch_happen_1.default)(url, {
            timeout: this.timeout,
            retry: this.retries,
        });
        if (response.ok && response?.body) {
            return response.body;
        }
        throw new error_1.DownloadHTTPError('Failed to download', response.status);
    }
}
exports.Fetcher = Fetcher;
+const writeBufferToStream = async (stream, buffer) => {
+    return new Promise((resolve, reject) => {
+        stream.write(buffer, (err) => {
+            if (err) {
+                reject(err);
+            }
+            resolve(true);
+        });
+    });
+};
diff --git a/node_modules/tuf-js/dist/index.d.ts b/node_modules/tuf-js/dist/index.d.ts
new file mode 100644
index 0000000000000..bfe3adcac2aef
--- /dev/null
+++ b/node_modules/tuf-js/dist/index.d.ts
@@ -0,0 +1,3 @@
+export { BaseFetcher } from './fetcher';
+export { TargetFile } from './models/file';
+export { Updater } from './updater';
diff --git a/node_modules/tuf-js/dist/index.js b/node_modules/tuf-js/dist/index.js
new file mode 100644
index 0000000000000..6245d1724a208
--- /dev/null
+++ b/node_modules/tuf-js/dist/index.js
@@ -0,0 +1,9 @@
"use strict";
// Public entry point of tuf-js: re-exports the supported API surface
// (BaseFetcher, TargetFile, Updater) from their defining modules.
Object.defineProperty(exports, "__esModule", { value: true });
exports.Updater = exports.TargetFile = exports.BaseFetcher = void 0;
var fetcher_1 = require("./fetcher");
Object.defineProperty(exports, "BaseFetcher", { enumerable: true, get: function () { return fetcher_1.BaseFetcher; } });
var file_1 = require("./models/file");
Object.defineProperty(exports, "TargetFile", { enumerable: true, get: function () { return file_1.TargetFile; } });
var updater_1 = require("./updater");
Object.defineProperty(exports, "Updater", { enumerable: true, get: function () { return updater_1.Updater; } });
diff --git a/node_modules/tuf-js/dist/models/base.d.ts b/node_modules/tuf-js/dist/models/base.d.ts
new file mode 100644
index 0000000000000..4c5e0aaf4faaa
--- /dev/null
+++ b/node_modules/tuf-js/dist/models/base.d.ts
@@ -0,0 +1,30 @@
+import { JSONObject, JSONValue } from '../utils/types';
+import { Signature } from './signature';
/** A signed payload together with the signatures covering it, keyed by key ID. */
export interface Signable {
    signatures: Record<string, Signature>;
    signed: Signed;
}
/** Constructor options shared by all Signed subclasses. */
export interface SignedOptions {
    version?: number;
    specVersion?: string;
    expires?: string;
    unrecognizedFields?: Record<string, JSONValue>;
}
/***
 * A base class for the signed part of TUF metadata.
 *
 * Objects with base class Signed are usually included in a ``Metadata`` object
 * on the signed attribute. This class provides attributes and methods that
 * are common for all TUF metadata types (roles).
 */
export declare abstract class Signed {
    readonly specVersion: string;
    readonly expires: string;
    readonly version: number;
    readonly unrecognizedFields: Record<string, JSONValue>;
    constructor(options: SignedOptions);
    equals(other: Signed): boolean;
    /** True when `referenceTime` (default: now) is at or past `expires`. */
    isExpired(referenceTime?: Date): boolean;
    /** Extracts the fields common to all metadata types from raw JSON. */
    static commonFieldsFromJSON(data: JSONObject): SignedOptions;
    abstract toJSON(): JSONObject;
}
diff --git a/node_modules/tuf-js/dist/models/base.js b/node_modules/tuf-js/dist/models/base.js
new file mode 100644
index 0000000000000..7658567e2d602
--- /dev/null
+++ b/node_modules/tuf-js/dist/models/base.js
@@ -0,0 +1,71 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Signed = void 0;
+const util_1 = __importDefault(require("util"));
+const error_1 = require("../error");
+const utils_1 = require("../utils");
// The TUF specification version this implementation targets ("1.0.31").
const SPECIFICATION_VERSION = ['1', '0', '31'];
/***
 * A base class for the signed part of TUF metadata.
 *
 * Objects with base class Signed are usually included in a ``Metadata`` object
 * on the signed attribute. This class provides attributes and methods that
 * are common for all TUF metadata types (roles).
 */
class Signed {
    // Throws ValueError if specVersion is malformed or its major version
    // differs from the supported SPECIFICATION_VERSION.
    constructor(options) {
        this.specVersion = options.specVersion || SPECIFICATION_VERSION.join('.');
        // specVersion must be 2 or 3 dot-separated numeric components
        const specList = this.specVersion.split('.');
        if (!(specList.length === 2 || specList.length === 3) ||
            !specList.every((item) => isNumeric(item))) {
            throw new error_1.ValueError('Failed to parse specVersion');
        }
        // major version must match
        if (specList[0] != SPECIFICATION_VERSION[0]) {
            throw new error_1.ValueError('Unsupported specVersion');
        }
        // NOTE(review): `||` means an empty-string expires and a version of 0
        // also fall back to the defaults (now / 1) -- presumably intentional,
        // matching the fromJSON path; confirm against upstream.
        this.expires = options.expires || new Date().toISOString();
        this.version = options.version || 1;
        this.unrecognizedFields = options.unrecognizedFields || {};
    }
    // Deep structural equality on all four common fields.
    equals(other) {
        if (!(other instanceof Signed)) {
            return false;
        }
        return (this.specVersion === other.specVersion &&
            this.expires === other.expires &&
            this.version === other.version &&
            util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields));
    }
    // True when referenceTime (default: now) is at or past `expires`.
    isExpired(referenceTime) {
        if (!referenceTime) {
            referenceTime = new Date();
        }
        return referenceTime >= new Date(this.expires);
    }
    // Extracts the common metadata fields from raw JSON, type-checking each;
    // any unknown fields are preserved in unrecognizedFields.
    static commonFieldsFromJSON(data) {
        const { spec_version, expires, version, ...rest } = data;
        if (utils_1.guard.isDefined(spec_version) && !(typeof spec_version === 'string')) {
            throw new TypeError('spec_version must be a string');
        }
        if (utils_1.guard.isDefined(expires) && !(typeof expires === 'string')) {
            throw new TypeError('expires must be a string');
        }
        if (utils_1.guard.isDefined(version) && !(typeof version === 'number')) {
            throw new TypeError('version must be a number');
        }
        return {
            specVersion: spec_version,
            expires,
            version,
            unrecognizedFields: rest,
        };
    }
}
exports.Signed = Signed;
// Returns true when `str` parses as a number.
//
// Fix: Number('') and Number('   ') evaluate to 0 (not NaN), so the previous
// implementation accepted blank strings as "numeric". That let malformed
// specVersion values with empty components (e.g. "1." -> ['1', '']) slip
// through the constructor's validation. Blank/whitespace-only strings are now
// rejected explicitly.
function isNumeric(str) {
    return str.trim().length > 0 && !Number.isNaN(Number(str));
}
diff --git a/node_modules/tuf-js/dist/models/delegations.d.ts b/node_modules/tuf-js/dist/models/delegations.d.ts
new file mode 100644
index 0000000000000..b53862aa865be
--- /dev/null
+++ b/node_modules/tuf-js/dist/models/delegations.d.ts
@@ -0,0 +1,32 @@
+import { JSONObject, JSONValue } from '../utils/types';
+import { Key } from './key';
+import { DelegatedRole, SuccinctRoles } from './role';
/** Delegated roles keyed by role name. */
type DelegatedRoleMap = Record<string, DelegatedRole>;
/** Keys keyed by key ID. */
type KeyMap = Record<string, Key>;
/** Constructor options; `roles` and `succinctRoles` are alternatives. */
interface DelegationsOptions {
    keys: KeyMap;
    roles?: DelegatedRoleMap;
    succinctRoles?: SuccinctRoles;
    unrecognizedFields?: Record<string, JSONValue>;
}
/**
 * A container object storing information about all delegations.
 *
 * Targets roles that are trusted to provide signed metadata files
 * describing targets with designated pathnames and/or further delegations.
 */
export declare class Delegations {
    readonly keys: KeyMap;
    readonly roles?: DelegatedRoleMap;
    readonly unrecognizedFields?: Record<string, JSONValue>;
    readonly succinctRoles?: SuccinctRoles;
    constructor(options: DelegationsOptions);
    equals(other: Delegations): boolean;
    /** Yields the delegated roles responsible for `targetPath`. */
    rolesForTarget(targetPath: string): Generator<{
        role: string;
        terminating: boolean;
    }>;
    toJSON(): JSONObject;
    static fromJSON(data: JSONObject): Delegations;
}
export {};
diff --git a/node_modules/tuf-js/dist/models/delegations.js b/node_modules/tuf-js/dist/models/delegations.js
new file mode 100644
index 0000000000000..302bd52d8d885
--- /dev/null
+++ b/node_modules/tuf-js/dist/models/delegations.js
@@ -0,0 +1,115 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Delegations = void 0;
+const util_1 = __importDefault(require("util"));
+const error_1 = require("../error");
+const guard_1 = require("../utils/guard");
+const key_1 = require("./key");
+const role_1 = require("./role");
+/**
+ * A container object storing information about all delegations.
+ *
+ * Targets roles that are trusted to provide signed metadata files
+ * describing targets with designated pathnames and/or further delegations.
+ */
class Delegations {
    // Throws ValueError if any delegated role name collides with a
    // top-level role name (root/targets/snapshot/timestamp).
    constructor(options) {
        this.keys = options.keys;
        this.unrecognizedFields = options.unrecognizedFields || {};
        if (options.roles) {
            if (Object.keys(options.roles).some((roleName) => role_1.TOP_LEVEL_ROLE_NAMES.includes(roleName))) {
                throw new error_1.ValueError('Delegated role name conflicts with top-level role name');
            }
        }
        this.succinctRoles = options.succinctRoles;
        this.roles = options.roles;
    }
    // Deep structural equality on keys, roles, succinctRoles, and any
    // unrecognized fields.
    equals(other) {
        if (!(other instanceof Delegations)) {
            return false;
        }
        return (util_1.default.isDeepStrictEqual(this.keys, other.keys) &&
            util_1.default.isDeepStrictEqual(this.roles, other.roles) &&
            util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields) &&
            util_1.default.isDeepStrictEqual(this.succinctRoles, other.succinctRoles));
    }
    // Yields { role, terminating } for every delegated role responsible for
    // targetPath. Explicit `roles` take precedence; otherwise a single
    // succinct-roles entry (always terminating) is yielded.
    *rolesForTarget(targetPath) {
        if (this.roles) {
            for (const role of Object.values(this.roles)) {
                if (role.isDelegatedPath(targetPath)) {
                    yield { role: role.name, terminating: role.terminating };
                }
            }
        }
        else if (this.succinctRoles) {
            yield {
                role: this.succinctRoles.getRoleForTarget(targetPath),
                terminating: true,
            };
        }
    }
    // Serializes to the TUF wire format: emits either `roles` or
    // `succinct_roles` (never both), mirroring the constructor's alternatives.
    toJSON() {
        const json = {
            keys: keysToJSON(this.keys),
            ...this.unrecognizedFields,
        };
        if (this.roles) {
            json.roles = rolesToJSON(this.roles);
        }
        else if (this.succinctRoles) {
            json.succinct_roles = this.succinctRoles.toJSON();
        }
        return json;
    }
    // Parses the TUF wire format; unknown top-level fields are preserved
    // in unrecognizedFields.
    static fromJSON(data) {
        const { keys, roles, succinct_roles, ...unrecognizedFields } = data;
        let succinctRoles;
        if ((0, guard_1.isObject)(succinct_roles)) {
            succinctRoles = role_1.SuccinctRoles.fromJSON(succinct_roles);
        }
        return new Delegations({
            keys: keysFromJSON(keys),
            roles: rolesFromJSON(roles),
            unrecognizedFields,
            succinctRoles,
        });
    }
}
exports.Delegations = Delegations;
// Serializes a key map: preserves each key ID and converts every Key
// to its JSON representation via toJSON().
function keysToJSON(keys) {
    const json = {};
    for (const [keyId, key] of Object.entries(keys)) {
        json[keyId] = key.toJSON();
    }
    return json;
}
// Serializes a role map to an array of role JSON objects (role names are
// embedded in each role's own JSON, so the map keys are dropped).
function rolesToJSON(roles) {
    const json = [];
    for (const role of Object.values(roles)) {
        json.push(role.toJSON());
    }
    return json;
}
// Parses the `keys` section of delegation JSON into a map of Key objects.
// Throws TypeError when the input is not a record of objects.
function keysFromJSON(data) {
    if (!(0, guard_1.isObjectRecord)(data)) {
        throw new TypeError('keys is malformed');
    }
    const keyMap = {};
    for (const [keyID, keyData] of Object.entries(data)) {
        keyMap[keyID] = key_1.Key.fromJSON(keyID, keyData);
    }
    return keyMap;
}
// Parses the optional `roles` section of delegation JSON into a map keyed by
// role name. Returns undefined when the section is absent; throws TypeError
// when present but not an array of objects.
function rolesFromJSON(data) {
    if (!(0, guard_1.isDefined)(data)) {
        return undefined;
    }
    if (!(0, guard_1.isObjectArray)(data)) {
        throw new TypeError('roles is malformed');
    }
    const roleMap = {};
    for (const role of data) {
        const delegatedRole = role_1.DelegatedRole.fromJSON(role);
        roleMap[delegatedRole.name] = delegatedRole;
    }
    return roleMap;
}
diff --git a/node_modules/tuf-js/dist/models/file.d.ts b/node_modules/tuf-js/dist/models/file.d.ts
new file mode 100644
index 0000000000000..9678cf1efefd5
--- /dev/null
+++ b/node_modules/tuf-js/dist/models/file.d.ts
@@ -0,0 +1,40 @@
+/// <reference types="node" />
+/// <reference types="node" />
+import { Readable } from 'stream';
+import { JSONObject, JSONValue } from '../utils/types';
/** Constructor options for MetaFile; `length` and `hashes` are optional checks. */
interface MetaFileOptions {
    version: number;
    length?: number;
    hashes?: Record<string, string>;
    unrecognizedFields?: Record<string, JSONValue>;
}
/**
 * A container with information about a particular metadata file,
 * used for Timestamp and Snapshot metadata (see file.js).
 */
export declare class MetaFile {
    readonly version: number;
    readonly length?: number;
    readonly hashes?: Record<string, string>;
    readonly unrecognizedFields?: Record<string, JSONValue>;
    constructor(opts: MetaFileOptions);
    equals(other: MetaFile): boolean;
    /** Verifies `data` against the expected length and hashes, if set. */
    verify(data: Buffer): void;
    toJSON(): JSONObject;
    static fromJSON(data: JSONObject): MetaFile;
}
/** Constructor options for TargetFile; all checks are mandatory here. */
interface TargetFileOptions {
    length: number;
    path: string;
    hashes: Record<string, string>;
    unrecognizedFields?: Record<string, JSONValue>;
}
/** A target file's metadata: repository path plus length/hash checks. */
export declare class TargetFile {
    readonly length: number;
    readonly path: string;
    readonly hashes: Record<string, string>;
    readonly unrecognizedFields: Record<string, JSONValue>;
    constructor(opts: TargetFileOptions);
    get custom(): Record<string, unknown>;
    equals(other: TargetFile): boolean;
    /** Verifies the streamed content against the expected length and hashes. */
    verify(stream: Readable): Promise<void>;
    toJSON(): JSONObject;
    static fromJSON(path: string, data: JSONObject): TargetFile;
}
export {};
diff --git a/node_modules/tuf-js/dist/models/file.js b/node_modules/tuf-js/dist/models/file.js
new file mode 100644
index 0000000000000..d6d535f6ca787
--- /dev/null
+++ b/node_modules/tuf-js/dist/models/file.js
@@ -0,0 +1,183 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TargetFile = exports.MetaFile = void 0;
+const crypto_1 = __importDefault(require("crypto"));
+const util_1 = __importDefault(require("util"));
+const error_1 = require("../error");
+const guard_1 = require("../utils/guard");
+// A container with information about a particular metadata file.
+//
+// This class is used for Timestamp and Snapshot metadata.
+class MetaFile {
+    constructor(opts) {
+        if (opts.version <= 0) {
+            throw new error_1.ValueError('Metafile version must be at least 1');
+        }
+        if (opts.length !== undefined) {
+            validateLength(opts.length);
+        }
+        this.version = opts.version;
+        this.length = opts.length;
+        this.hashes = opts.hashes;
+        this.unrecognizedFields = opts.unrecognizedFields || {};
+    }
+    equals(other) {
+        if (!(other instanceof MetaFile)) {
+            return false;
+        }
+        return (this.version === other.version &&
+            this.length === other.length &&
+            util_1.default.isDeepStrictEqual(this.hashes, other.hashes) &&
+            util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields));
+    }
+    verify(data) {
+        // Verifies that the given data matches the expected length.
+        if (this.length !== undefined) {
+            if (data.length !== this.length) {
+                throw new error_1.LengthOrHashMismatchError(`Expected length ${this.length} but got ${data.length}`);
+            }
+        }
+        // Verifies that the given data matches the supplied hashes.
+        if (this.hashes) {
+            Object.entries(this.hashes).forEach(([key, value]) => {
+                let hash;
+                try {
+                    hash = crypto_1.default.createHash(key);
+                }
+                catch (e) {
+                    throw new error_1.LengthOrHashMismatchError(`Hash algorithm ${key} not supported`);
+                }
+                const observedHash = hash.update(data).digest('hex');
+                if (observedHash !== value) {
+                    throw new error_1.LengthOrHashMismatchError(`Expected hash ${value} but got ${observedHash}`);
+                }
+            });
+        }
+    }
+    toJSON() {
+        const json = {
+            version: this.version,
+            ...this.unrecognizedFields,
+        };
+        if (this.length !== undefined) {
+            json.length = this.length;
+        }
+        if (this.hashes) {
+            json.hashes = this.hashes;
+        }
+        return json;
+    }
+    static fromJSON(data) {
+        const { version, length, hashes, ...rest } = data;
+        if (typeof version !== 'number') {
+            throw new TypeError('version must be a number');
+        }
+        if ((0, guard_1.isDefined)(length) && typeof length !== 'number') {
+            throw new TypeError('length must be a number');
+        }
+        if ((0, guard_1.isDefined)(hashes) && !(0, guard_1.isStringRecord)(hashes)) {
+            throw new TypeError('hashes must be string keys and values');
+        }
+        return new MetaFile({
+            version,
+            length,
+            hashes,
+            unrecognizedFields: rest,
+        });
+    }
+}
+exports.MetaFile = MetaFile;
+// Container for info about a particular target file.
+//
+// This class is used for Target metadata.
+class TargetFile {
+    constructor(opts) {
+        validateLength(opts.length);
+        this.length = opts.length;
+        this.path = opts.path;
+        this.hashes = opts.hashes;
+        this.unrecognizedFields = opts.unrecognizedFields || {};
+    }
+    get custom() {
+        const custom = this.unrecognizedFields['custom'];
+        if (!custom || Array.isArray(custom) || !(typeof custom === 'object')) {
+            return {};
+        }
+        return custom;
+    }
+    equals(other) {
+        if (!(other instanceof TargetFile)) {
+            return false;
+        }
+        return (this.length === other.length &&
+            this.path === other.path &&
+            util_1.default.isDeepStrictEqual(this.hashes, other.hashes) &&
+            util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields));
+    }
+    async verify(stream) {
+        let observedLength = 0;
+        // Create a digest for each hash algorithm
+        const digests = Object.keys(this.hashes).reduce((acc, key) => {
+            try {
+                acc[key] = crypto_1.default.createHash(key);
+            }
+            catch (e) {
+                throw new error_1.LengthOrHashMismatchError(`Hash algorithm ${key} not supported`);
+            }
+            return acc;
+        }, {});
+        // Read stream chunk by chunk
+        for await (const chunk of stream) {
+            // Keep running tally of stream length
+            observedLength += chunk.length;
+            // Append chunk to each digest
+            Object.values(digests).forEach((digest) => {
+                digest.update(chunk);
+            });
+        }
+        // Verify length matches expected value
+        if (observedLength !== this.length) {
+            throw new error_1.LengthOrHashMismatchError(`Expected length ${this.length} but got ${observedLength}`);
+        }
+        // Verify each digest matches expected value
+        Object.entries(digests).forEach(([key, value]) => {
+            const expected = this.hashes[key];
+            const actual = value.digest('hex');
+            if (actual !== expected) {
+                throw new error_1.LengthOrHashMismatchError(`Expected hash ${expected} but got ${actual}`);
+            }
+        });
+    }
+    toJSON() {
+        return {
+            length: this.length,
+            hashes: this.hashes,
+            ...this.unrecognizedFields,
+        };
+    }
+    static fromJSON(path, data) {
+        const { length, hashes, ...rest } = data;
+        if (typeof length !== 'number') {
+            throw new TypeError('length must be a number');
+        }
+        if (!(0, guard_1.isStringRecord)(hashes)) {
+            throw new TypeError('hashes must have string keys and values');
+        }
+        return new TargetFile({
+            length,
+            path,
+            hashes,
+            unrecognizedFields: rest,
+        });
+    }
+}
+exports.TargetFile = TargetFile;
+// Check that the supplied length is valid
+function validateLength(length) {
+    if (length < 0) {
+        throw new error_1.ValueError('Length must be at least 0');
+    }
+}
diff --git a/node_modules/tuf-js/dist/models/index.d.ts b/node_modules/tuf-js/dist/models/index.d.ts
new file mode 100644
index 0000000000000..58d779159215b
--- /dev/null
+++ b/node_modules/tuf-js/dist/models/index.d.ts
@@ -0,0 +1,5 @@
+export { Metadata } from './metadata';
+export { Root } from './root';
+export { Snapshot } from './snapshot';
+export { Targets } from './targets';
+export { Timestamp } from './timestamp';
diff --git a/node_modules/tuf-js/dist/models/index.js b/node_modules/tuf-js/dist/models/index.js
new file mode 100644
index 0000000000000..aa3d828cf9b43
--- /dev/null
+++ b/node_modules/tuf-js/dist/models/index.js
@@ -0,0 +1,13 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Timestamp = exports.Targets = exports.Snapshot = exports.Root = exports.Metadata = void 0;
+var metadata_1 = require("./metadata");
+Object.defineProperty(exports, "Metadata", { enumerable: true, get: function () { return metadata_1.Metadata; } });
+var root_1 = require("./root");
+Object.defineProperty(exports, "Root", { enumerable: true, get: function () { return root_1.Root; } });
+var snapshot_1 = require("./snapshot");
+Object.defineProperty(exports, "Snapshot", { enumerable: true, get: function () { return snapshot_1.Snapshot; } });
+var targets_1 = require("./targets");
+Object.defineProperty(exports, "Targets", { enumerable: true, get: function () { return targets_1.Targets; } });
+var timestamp_1 = require("./timestamp");
+Object.defineProperty(exports, "Timestamp", { enumerable: true, get: function () { return timestamp_1.Timestamp; } });
diff --git a/node_modules/tuf-js/dist/models/key.d.ts b/node_modules/tuf-js/dist/models/key.d.ts
new file mode 100644
index 0000000000000..160407ae70ee3
--- /dev/null
+++ b/node_modules/tuf-js/dist/models/key.d.ts
@@ -0,0 +1,21 @@
+import { JSONObject, JSONValue } from '../utils/types';
+import { Signable } from './base';
+export interface KeyOptions {
+    keyID: string;
+    keyType: string;
+    scheme: string;
+    keyVal: Record<string, string>;
+    unrecognizedFields?: Record<string, JSONValue>;
+}
+export declare class Key {
+    readonly keyID: string;
+    readonly keyType: string;
+    readonly scheme: string;
+    readonly keyVal: Record<string, string>;
+    readonly unrecognizedFields?: Record<string, JSONValue>;
+    constructor(options: KeyOptions);
+    verifySignature(metadata: Signable): void;
+    equals(other: Key): boolean;
+    toJSON(): JSONObject;
+    static fromJSON(keyID: string, data: JSONObject): Key;
+}
diff --git a/node_modules/tuf-js/dist/models/key.js b/node_modules/tuf-js/dist/models/key.js
new file mode 100644
index 0000000000000..33ff514fc178f
--- /dev/null
+++ b/node_modules/tuf-js/dist/models/key.js
@@ -0,0 +1,109 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Key = void 0;
+const util_1 = __importDefault(require("util"));
+const error_1 = require("../error");
+const guard_1 = require("../utils/guard");
+const key_1 = require("../utils/key");
+const signer = __importStar(require("../utils/signer"));
+// A container class representing the public portion of a Key.
+class Key {
+    constructor(options) {
+        const { keyID, keyType, scheme, keyVal, unrecognizedFields } = options;
+        this.keyID = keyID;
+        this.keyType = keyType;
+        this.scheme = scheme;
+        this.keyVal = keyVal;
+        this.unrecognizedFields = unrecognizedFields || {};
+    }
+    // Verifies that the metadata.signatures contains a signature made with
+    // this key and is correctly signed.
+    verifySignature(metadata) {
+        const signature = metadata.signatures[this.keyID];
+        if (!signature)
+            throw new error_1.UnsignedMetadataError('no signature for key found in metadata');
+        if (!this.keyVal.public)
+            throw new error_1.UnsignedMetadataError('no public key found');
+        const publicKey = (0, key_1.getPublicKey)({
+            keyType: this.keyType,
+            scheme: this.scheme,
+            keyVal: this.keyVal.public,
+        });
+        const signedData = metadata.signed.toJSON();
+        try {
+            if (!signer.verifySignature(signedData, publicKey, signature.sig)) {
+                throw new error_1.UnsignedMetadataError(`failed to verify ${this.keyID} signature`);
+            }
+        }
+        catch (error) {
+            if (error instanceof error_1.UnsignedMetadataError) {
+                throw error;
+            }
+            throw new error_1.UnsignedMetadataError(`failed to verify ${this.keyID} signature`);
+        }
+    }
+    equals(other) {
+        if (!(other instanceof Key)) {
+            return false;
+        }
+        return (this.keyID === other.keyID &&
+            this.keyType === other.keyType &&
+            this.scheme === other.scheme &&
+            util_1.default.isDeepStrictEqual(this.keyVal, other.keyVal) &&
+            util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields));
+    }
+    toJSON() {
+        return {
+            keytype: this.keyType,
+            scheme: this.scheme,
+            keyval: this.keyVal,
+            ...this.unrecognizedFields,
+        };
+    }
+    static fromJSON(keyID, data) {
+        const { keytype, scheme, keyval, ...rest } = data;
+        if (typeof keytype !== 'string') {
+            throw new TypeError('keytype must be a string');
+        }
+        if (typeof scheme !== 'string') {
+            throw new TypeError('scheme must be a string');
+        }
+        if (!(0, guard_1.isStringRecord)(keyval)) {
+            throw new TypeError('keyval must be a string record');
+        }
+        return new Key({
+            keyID,
+            keyType: keytype,
+            scheme,
+            keyVal: keyval,
+            unrecognizedFields: rest,
+        });
+    }
+}
+exports.Key = Key;
diff --git a/node_modules/tuf-js/dist/models/metadata.d.ts b/node_modules/tuf-js/dist/models/metadata.d.ts
new file mode 100644
index 0000000000000..39abf03406449
--- /dev/null
+++ b/node_modules/tuf-js/dist/models/metadata.d.ts
@@ -0,0 +1,45 @@
+import { JSONObject, JSONValue, MetadataKind } from '../utils/types';
+import { Signable } from './base';
+import { Root } from './root';
+import { Signature } from './signature';
+import { Snapshot } from './snapshot';
+import { Targets } from './targets';
+import { Timestamp } from './timestamp';
+type MetadataType = Root | Timestamp | Snapshot | Targets;
+/***
+ * A container for signed TUF metadata.
+ *
+ * Provides methods to convert to and from json, read and write to and
+ * from JSON and to create and verify metadata signatures.
+ *
+ * ``Metadata[T]`` is a generic container type where T can be any one type of
+ * [``Root``, ``Timestamp``, ``Snapshot``, ``Targets``]. The purpose of this
+ * is to allow static type checking of the signed attribute in code using
+ * Metadata::
+ *
+ * root_md = Metadata[Root].fromJSON("root.json")
+ * # root_md type is now Metadata[Root]. This means signed and its
+ * # attributes like consistent_snapshot are now statically typed and the
+ * # types can be verified by static type checkers and shown by IDEs
+ *
+ * Using a type constraint is not required but not doing so means T is not a
+ * specific type so static typing cannot happen. Note that the type constraint
+ * ``[Root]`` is not validated at runtime (as pure annotations are not available
+ * then).
+ *
+ * Apart from ``expires`` all of the arguments to the inner constructors have
+ * reasonable default values for new metadata.
+ */
+export declare class Metadata<T extends MetadataType> implements Signable {
+    signed: T;
+    signatures: Record<string, Signature>;
+    unrecognizedFields: Record<string, JSONValue>;
+    constructor(signed: T, signatures?: Record<string, Signature>, unrecognizedFields?: Record<string, JSONValue>);
+    verifyDelegate(delegatedRole: string, delegatedMetadata: Metadata<MetadataType>): void;
+    equals(other: T): boolean;
+    static fromJSON(type: MetadataKind.Root, data: JSONObject): Metadata<Root>;
+    static fromJSON(type: MetadataKind.Timestamp, data: JSONObject): Metadata<Timestamp>;
+    static fromJSON(type: MetadataKind.Snapshot, data: JSONObject): Metadata<Snapshot>;
+    static fromJSON(type: MetadataKind.Targets, data: JSONObject): Metadata<Targets>;
+}
+export {};
diff --git a/node_modules/tuf-js/dist/models/metadata.js b/node_modules/tuf-js/dist/models/metadata.js
new file mode 100644
index 0000000000000..11c3c546822ac
--- /dev/null
+++ b/node_modules/tuf-js/dist/models/metadata.js
@@ -0,0 +1,139 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Metadata = void 0;
+const util_1 = __importDefault(require("util"));
+const error_1 = require("../error");
+const guard_1 = require("../utils/guard");
+const types_1 = require("../utils/types");
+const root_1 = require("./root");
+const signature_1 = require("./signature");
+const snapshot_1 = require("./snapshot");
+const targets_1 = require("./targets");
+const timestamp_1 = require("./timestamp");
+/***
+ * A container for signed TUF metadata.
+ *
+ * Provides methods to convert to and from json, read and write to and
+ * from JSON and to create and verify metadata signatures.
+ *
+ * ``Metadata[T]`` is a generic container type where T can be any one type of
+ * [``Root``, ``Timestamp``, ``Snapshot``, ``Targets``]. The purpose of this
+ * is to allow static type checking of the signed attribute in code using
+ * Metadata::
+ *
+ * root_md = Metadata[Root].fromJSON("root.json")
+ * # root_md type is now Metadata[Root]. This means signed and its
+ * # attributes like consistent_snapshot are now statically typed and the
+ * # types can be verified by static type checkers and shown by IDEs
+ *
+ * Using a type constraint is not required but not doing so means T is not a
+ * specific type so static typing cannot happen. Note that the type constraint
+ * ``[Root]`` is not validated at runtime (as pure annotations are not available
+ * then).
+ *
+ * Apart from ``expires`` all of the arguments to the inner constructors have
+ * reasonable default values for new metadata.
+ */
+class Metadata {
+    constructor(signed, signatures, unrecognizedFields) {
+        this.signed = signed;
+        this.signatures = signatures || {};
+        this.unrecognizedFields = unrecognizedFields || {};
+    }
+    verifyDelegate(delegatedRole, delegatedMetadata) {
+        let role;
+        let keys = {};
+        switch (this.signed.type) {
+            case types_1.MetadataKind.Root:
+                keys = this.signed.keys;
+                role = this.signed.roles[delegatedRole];
+                break;
+            case types_1.MetadataKind.Targets:
+                if (!this.signed.delegations) {
+                    throw new error_1.ValueError(`No delegations found for ${delegatedRole}`);
+                }
+                keys = this.signed.delegations.keys;
+                if (this.signed.delegations.roles) {
+                    role = this.signed.delegations.roles[delegatedRole];
+                }
+                else if (this.signed.delegations.succinctRoles) {
+                    if (this.signed.delegations.succinctRoles.isDelegatedRole(delegatedRole)) {
+                        role = this.signed.delegations.succinctRoles;
+                    }
+                }
+                break;
+            default:
+                throw new TypeError('invalid metadata type');
+        }
+        if (!role) {
+            throw new error_1.ValueError(`no delegation found for ${delegatedRole}`);
+        }
+        const signingKeys = new Set();
+        role.keyIDs.forEach((keyID) => {
+            const key = keys[keyID];
+            // If we don't have the key, continue checking other keys
+            if (!key) {
+                return;
+            }
+            try {
+                key.verifySignature(delegatedMetadata);
+                signingKeys.add(key.keyID);
+            }
+            catch (error) {
+                // continue
+            }
+        });
+        if (signingKeys.size < role.threshold) {
+            throw new error_1.UnsignedMetadataError(`${delegatedRole} was signed by ${signingKeys.size}/${role.threshold} keys`);
+        }
+    }
+    equals(other) {
+        if (!(other instanceof Metadata)) {
+            return false;
+        }
+        return (this.signed.equals(other.signed) &&
+            util_1.default.isDeepStrictEqual(this.signatures, other.signatures) &&
+            util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields));
+    }
+    static fromJSON(type, data) {
+        const { signed, signatures, ...rest } = data;
+        if (!(0, guard_1.isDefined)(signed) || !(0, guard_1.isObject)(signed)) {
+            throw new TypeError('signed is not defined');
+        }
+        if (type !== signed._type) {
+            throw new error_1.ValueError(`expected '${type}', got ${signed['_type']}`);
+        }
+        let signedObj;
+        switch (type) {
+            case types_1.MetadataKind.Root:
+                signedObj = root_1.Root.fromJSON(signed);
+                break;
+            case types_1.MetadataKind.Timestamp:
+                signedObj = timestamp_1.Timestamp.fromJSON(signed);
+                break;
+            case types_1.MetadataKind.Snapshot:
+                signedObj = snapshot_1.Snapshot.fromJSON(signed);
+                break;
+            case types_1.MetadataKind.Targets:
+                signedObj = targets_1.Targets.fromJSON(signed);
+                break;
+            default:
+                throw new TypeError('invalid metadata type');
+        }
+        const sigMap = signaturesFromJSON(signatures);
+        return new Metadata(signedObj, sigMap, rest);
+    }
+}
+exports.Metadata = Metadata;
+function signaturesFromJSON(data) {
+    if (!(0, guard_1.isObjectArray)(data)) {
+        throw new TypeError('signatures is not an array');
+    }
+    return data.reduce((acc, sigData) => {
+        const signature = signature_1.Signature.fromJSON(sigData);
+        return { ...acc, [signature.keyID]: signature };
+    }, {});
+}
diff --git a/node_modules/tuf-js/dist/models/role.d.ts b/node_modules/tuf-js/dist/models/role.d.ts
new file mode 100644
index 0000000000000..4575300fb972f
--- /dev/null
+++ b/node_modules/tuf-js/dist/models/role.d.ts
@@ -0,0 +1,103 @@
+import { JSONObject, JSONValue } from '../utils/types';
+export declare const TOP_LEVEL_ROLE_NAMES: string[];
+export interface RoleOptions {
+    keyIDs: string[];
+    threshold: number;
+    unrecognizedFields?: Record<string, JSONValue>;
+}
+/**
+ * Container that defines which keys are required to sign roles metadata.
+ *
+ * Role defines how many keys are required to successfully sign the roles
+ * metadata, and which keys are accepted.
+ */
+export declare class Role {
+    readonly keyIDs: string[];
+    readonly threshold: number;
+    readonly unrecognizedFields?: Record<string, JSONValue>;
+    constructor(options: RoleOptions);
+    equals(other: Role): boolean;
+    toJSON(): JSONObject;
+    static fromJSON(data: JSONObject): Role;
+}
+interface DelegatedRoleOptions extends RoleOptions {
+    name: string;
+    terminating: boolean;
+    paths?: string[];
+    pathHashPrefixes?: string[];
+}
+/**
+ * A container with information about a delegated role.
+ *
+ * A delegation can happen in two ways:
+ *   - ``paths`` is set: delegates targets matching any path pattern in ``paths``
+ *   - ``pathHashPrefixes`` is set: delegates targets whose target path hash
+ *      starts with any of the prefixes in ``pathHashPrefixes``
+ *
+ *   ``paths`` and ``pathHashPrefixes`` are mutually exclusive: both cannot be
+ *   set, at least one of them must be set.
+ */
+export declare class DelegatedRole extends Role {
+    readonly name: string;
+    readonly terminating: boolean;
+    readonly paths?: string[];
+    readonly pathHashPrefixes?: string[];
+    constructor(opts: DelegatedRoleOptions);
+    equals(other: DelegatedRole): boolean;
+    isDelegatedPath(targetFilepath: string): boolean;
+    toJSON(): JSONObject;
+    static fromJSON(data: JSONObject): DelegatedRole;
+}
+interface SuccinctRolesOption extends RoleOptions {
+    bitLength: number;
+    namePrefix: string;
+}
+/**
+ * Succinctly defines a hash bin delegation graph.
+ *
+ * A ``SuccinctRoles`` object describes a delegation graph that covers all
+ * targets, distributing them uniformly over the delegated roles (i.e. bins)
+ * in the graph.
+ *
+ * The total number of bins is 2 to the power of the passed ``bit_length``.
+ *
+ * Bin names are the concatenation of the passed ``name_prefix`` and a
+ * zero-padded hex representation of the bin index separated by a hyphen.
+ *
+ * The passed ``keyids`` and ``threshold`` is used for each bin, and each bin
+ * is 'terminating'.
+ *
+ * For details: https://github.com/theupdateframework/taps/blob/master/tap15.md
+ */
+export declare class SuccinctRoles extends Role {
+    readonly bitLength: number;
+    readonly namePrefix: string;
+    readonly numberOfBins: number;
+    readonly suffixLen: number;
+    constructor(opts: SuccinctRolesOption);
+    equals(other: SuccinctRoles): boolean;
+    /***
+     * Calculates the name of the delegated role responsible for 'target_filepath'.
+     *
+     * The target at path ''target_filepath' is assigned to a bin by casting
+     * the left-most 'bit_length' of bits of the file path hash digest to
+     * int, using it as bin index between 0 and '2**bit_length - 1'.
+     *
+     * Args:
+     *  target_filepath: URL path to a target file, relative to a base
+     *  targets URL.
+     */
+    getRoleForTarget(targetFilepath: string): string;
+    getRoles(): Generator<string>;
+    /***
+     * Determines whether the given ``role_name`` is in one of
+     * the delegated roles that ``SuccinctRoles`` represents.
+     *
+     * Args:
+     *  role_name: The name of the role to check against.
+     */
+    isDelegatedRole(roleName: string): boolean;
+    toJSON(): JSONObject;
+    static fromJSON(data: JSONObject): SuccinctRoles;
+}
+export {};
diff --git a/node_modules/tuf-js/dist/models/role.js b/node_modules/tuf-js/dist/models/role.js
new file mode 100644
index 0000000000000..da80a09b8b09f
--- /dev/null
+++ b/node_modules/tuf-js/dist/models/role.js
@@ -0,0 +1,298 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.SuccinctRoles = exports.DelegatedRole = exports.Role = exports.TOP_LEVEL_ROLE_NAMES = void 0;
+const crypto_1 = __importDefault(require("crypto"));
+const minimatch_1 = __importDefault(require("minimatch"));
+const util_1 = __importDefault(require("util"));
+const error_1 = require("../error");
+const guard_1 = require("../utils/guard");
+exports.TOP_LEVEL_ROLE_NAMES = [
+    'root',
+    'targets',
+    'snapshot',
+    'timestamp',
+];
+/**
+ * Container that defines which keys are required to sign roles metadata.
+ *
+ * Role defines how many keys are required to successfully sign the roles
+ * metadata, and which keys are accepted.
+ */
+class Role {
+    constructor(options) {
+        const { keyIDs, threshold, unrecognizedFields } = options;
+        if (hasDuplicates(keyIDs)) {
+            throw new error_1.ValueError('duplicate key IDs found');
+        }
+        if (threshold < 1) {
+            throw new error_1.ValueError('threshold must be at least 1');
+        }
+        this.keyIDs = keyIDs;
+        this.threshold = threshold;
+        this.unrecognizedFields = unrecognizedFields || {};
+    }
+    equals(other) {
+        if (!(other instanceof Role)) {
+            return false;
+        }
+        return (this.threshold === other.threshold &&
+            util_1.default.isDeepStrictEqual(this.keyIDs, other.keyIDs) &&
+            util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields));
+    }
+    toJSON() {
+        return {
+            keyids: this.keyIDs,
+            threshold: this.threshold,
+            ...this.unrecognizedFields,
+        };
+    }
+    static fromJSON(data) {
+        const { keyids, threshold, ...rest } = data;
+        if (!(0, guard_1.isStringArray)(keyids)) {
+            throw new TypeError('keyids must be an array');
+        }
+        if (typeof threshold !== 'number') {
+            throw new TypeError('threshold must be a number');
+        }
+        return new Role({
+            keyIDs: keyids,
+            threshold,
+            unrecognizedFields: rest,
+        });
+    }
+}
+exports.Role = Role;
+function hasDuplicates(array) {
+    return new Set(array).size !== array.length;
+}
+/**
+ * A container with information about a delegated role.
+ *
+ * A delegation can happen in two ways:
+ *   - ``paths`` is set: delegates targets matching any path pattern in ``paths``
+ *   - ``pathHashPrefixes`` is set: delegates targets whose target path hash
+ *      starts with any of the prefixes in ``pathHashPrefixes``
+ *
+ *   ``paths`` and ``pathHashPrefixes`` are mutually exclusive: both cannot be
+ *   set, but at least one of them must be set.
+ */
+class DelegatedRole extends Role {
+    constructor(opts) {
+        super(opts);
+        const { name, terminating, paths, pathHashPrefixes } = opts;
+        this.name = name;
+        this.terminating = terminating;
+        if (opts.paths && opts.pathHashPrefixes) {
+            throw new error_1.ValueError('paths and pathHashPrefixes are mutually exclusive');
+        }
+        this.paths = paths;
+        this.pathHashPrefixes = pathHashPrefixes;
+    }
+    equals(other) {
+        if (!(other instanceof DelegatedRole)) {
+            return false;
+        }
+        return (super.equals(other) &&
+            this.name === other.name &&
+            this.terminating === other.terminating &&
+            util_1.default.isDeepStrictEqual(this.paths, other.paths) &&
+            util_1.default.isDeepStrictEqual(this.pathHashPrefixes, other.pathHashPrefixes));
+    }
+    isDelegatedPath(targetFilepath) {
+        if (this.paths) {
+            return this.paths.some((pathPattern) => isTargetInPathPattern(targetFilepath, pathPattern));
+        }
+        if (this.pathHashPrefixes) {
+            const hasher = crypto_1.default.createHash('sha256');
+            const pathHash = hasher.update(targetFilepath).digest('hex');
+            return this.pathHashPrefixes.some((pathHashPrefix) => pathHash.startsWith(pathHashPrefix));
+        }
+        return false;
+    }
+    toJSON() {
+        const json = {
+            ...super.toJSON(),
+            name: this.name,
+            terminating: this.terminating,
+        };
+        if (this.paths) {
+            json.paths = this.paths;
+        }
+        if (this.pathHashPrefixes) {
+            json.path_hash_prefixes = this.pathHashPrefixes;
+        }
+        return json;
+    }
+    static fromJSON(data) {
+        const { keyids, threshold, name, terminating, paths, path_hash_prefixes, ...rest } = data;
+        if (!(0, guard_1.isStringArray)(keyids)) {
+            throw new TypeError('keyids must be an array of strings');
+        }
+        if (typeof threshold !== 'number') {
+            throw new TypeError('threshold must be a number');
+        }
+        if (typeof name !== 'string') {
+            throw new TypeError('name must be a string');
+        }
+        if (typeof terminating !== 'boolean') {
+            throw new TypeError('terminating must be a boolean');
+        }
+        if ((0, guard_1.isDefined)(paths) && !(0, guard_1.isStringArray)(paths)) {
+            throw new TypeError('paths must be an array of strings');
+        }
+        if ((0, guard_1.isDefined)(path_hash_prefixes) && !(0, guard_1.isStringArray)(path_hash_prefixes)) {
+            throw new TypeError('path_hash_prefixes must be an array of strings');
+        }
+        return new DelegatedRole({
+            keyIDs: keyids,
+            threshold,
+            name,
+            terminating,
+            paths,
+            pathHashPrefixes: path_hash_prefixes,
+            unrecognizedFields: rest,
+        });
+    }
+}
+exports.DelegatedRole = DelegatedRole;
+// JS version of Ruby's Array#zip
+const zip = (a, b) => a.map((k, i) => [k, b[i]]);
+function isTargetInPathPattern(target, pattern) {
+    const targetParts = target.split('/');
+    const patternParts = pattern.split('/');
+    if (patternParts.length != targetParts.length) {
+        return false;
+    }
+    return zip(targetParts, patternParts).every(([targetPart, patternPart]) => (0, minimatch_1.default)(targetPart, patternPart));
+}
+/**
+ * Succinctly defines a hash bin delegation graph.
+ *
+ * A ``SuccinctRoles`` object describes a delegation graph that covers all
+ * targets, distributing them uniformly over the delegated roles (i.e. bins)
+ * in the graph.
+ *
+ * The total number of bins is 2 to the power of the passed ``bit_length``.
+ *
+ * Bin names are the concatenation of the passed ``name_prefix`` and a
+ * zero-padded hex representation of the bin index separated by a hyphen.
+ *
+ * The passed ``keyids`` and ``threshold`` are used for each bin, and each bin
+ * is 'terminating'.
+ *
+ * For details: https://github.com/theupdateframework/taps/blob/master/tap15.md
+ */
+class SuccinctRoles extends Role {
+    constructor(opts) {
+        super(opts);
+        const { bitLength, namePrefix } = opts;
+        if (bitLength <= 0 || bitLength > 32) {
+            throw new error_1.ValueError('bitLength must be between 1 and 32');
+        }
+        this.bitLength = bitLength;
+        this.namePrefix = namePrefix;
+        // Calculate the suffix_len value based on the total number of bins in
+        // hex. If bit_length = 10 then number_of_bins = 1024 or bin names will
+        // have a suffix between "000" and "3ff" in hex and suffix_len will be 3
+        // meaning the third bin will have a suffix of "003".
+        this.numberOfBins = Math.pow(2, bitLength);
+        // suffix_len is calculated based on "number_of_bins - 1" as the name
+        // of the last bin contains the number "number_of_bins - 1" as a suffix.
+        this.suffixLen = (this.numberOfBins - 1).toString(16).length;
+    }
+    equals(other) {
+        if (!(other instanceof SuccinctRoles)) {
+            return false;
+        }
+        return (super.equals(other) &&
+            this.bitLength === other.bitLength &&
+            this.namePrefix === other.namePrefix);
+    }
+    /***
+     * Calculates the name of the delegated role responsible for 'target_filepath'.
+     *
+     * The target at path 'target_filepath' is assigned to a bin by casting
+     * the left-most 'bit_length' of bits of the file path hash digest to
+     * int, using it as bin index between 0 and '2**bit_length - 1'.
+     *
+     * Args:
+     *  target_filepath: URL path to a target file, relative to a base
+     *  targets URL.
+     */
+    getRoleForTarget(targetFilepath) {
+        const hasher = crypto_1.default.createHash('sha256');
+        const hasherBuffer = hasher.update(targetFilepath).digest();
+        // can't ever need more than 4 bytes (32 bits).
+        const hashBytes = hasherBuffer.subarray(0, 4);
+        // Right shift hash bytes, so that we only have the leftmost
+        // bit_length bits that we care about.
+        const shiftValue = 32 - this.bitLength;
+        const binNumber = hashBytes.readUInt32BE() >>> shiftValue;
+        // Add zero padding if necessary and cast to hex the suffix.
+        const suffix = binNumber.toString(16).padStart(this.suffixLen, '0');
+        return `${this.namePrefix}-${suffix}`;
+    }
+    *getRoles() {
+        for (let i = 0; i < this.numberOfBins; i++) {
+            const suffix = i.toString(16).padStart(this.suffixLen, '0');
+            yield `${this.namePrefix}-${suffix}`;
+        }
+    }
+    /***
+     * Determines whether the given ``role_name`` is in one of
+     * the delegated roles that ``SuccinctRoles`` represents.
+     *
+     * Args:
+     *  role_name: The name of the role to check against.
+     */
+    isDelegatedRole(roleName) {
+        const desiredPrefix = this.namePrefix + '-';
+        if (!roleName.startsWith(desiredPrefix)) {
+            return false;
+        }
+        const suffix = roleName.slice(desiredPrefix.length, roleName.length);
+        if (suffix.length != this.suffixLen) {
+            return false;
+        }
+        // make sure the suffix is a hex string
+        if (!suffix.match(/^[0-9a-fA-F]+$/)) {
+            return false;
+        }
+        const num = parseInt(suffix, 16);
+        return 0 <= num && num < this.numberOfBins;
+    }
+    toJSON() {
+        const json = {
+            ...super.toJSON(),
+            bit_length: this.bitLength,
+            name_prefix: this.namePrefix,
+        };
+        return json;
+    }
+    static fromJSON(data) {
+        const { keyids, threshold, bit_length, name_prefix, ...rest } = data;
+        if (!(0, guard_1.isStringArray)(keyids)) {
+            throw new TypeError('keyids must be an array of strings');
+        }
+        if (typeof threshold !== 'number') {
+            throw new TypeError('threshold must be a number');
+        }
+        if (typeof bit_length !== 'number') {
+            throw new TypeError('bit_length must be a number');
+        }
+        if (typeof name_prefix !== 'string') {
+            throw new TypeError('name_prefix must be a string');
+        }
+        return new SuccinctRoles({
+            keyIDs: keyids,
+            threshold,
+            bitLength: bit_length,
+            namePrefix: name_prefix,
+            unrecognizedFields: rest,
+        });
+    }
+}
+exports.SuccinctRoles = SuccinctRoles;
diff --git a/node_modules/tuf-js/dist/models/root.d.ts b/node_modules/tuf-js/dist/models/root.d.ts
new file mode 100644
index 0000000000000..66356628f4b8a
--- /dev/null
+++ b/node_modules/tuf-js/dist/models/root.d.ts
@@ -0,0 +1,28 @@
+import { JSONObject, MetadataKind } from '../utils/types';
+import { Signed, SignedOptions } from './base';
+import { Key } from './key';
+import { Role } from './role';
+type KeyMap = Record<string, Key>;
+type RoleMap = Record<string, Role>;
+export interface RootOptions extends SignedOptions {
+    keys?: Record<string, Key>;
+    roles?: Record<string, Role>;
+    consistentSnapshot?: boolean;
+}
+/**
+ * A container for the signed part of root metadata.
+ *
+ * The top-level role and metadata file signed by the root keys.
+ * This role specifies trusted keys for all other top-level roles, which may further delegate trust.
+ */
+export declare class Root extends Signed {
+    readonly type = MetadataKind.Root;
+    readonly keys: KeyMap;
+    readonly roles: RoleMap;
+    readonly consistentSnapshot: boolean;
+    constructor(options: RootOptions);
+    equals(other: Root): boolean;
+    toJSON(): JSONObject;
+    static fromJSON(data: JSONObject): Root;
+}
+export {};
diff --git a/node_modules/tuf-js/dist/models/root.js b/node_modules/tuf-js/dist/models/root.js
new file mode 100644
index 0000000000000..574ec1acdcc39
--- /dev/null
+++ b/node_modules/tuf-js/dist/models/root.js
@@ -0,0 +1,107 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Root = void 0;
+const util_1 = __importDefault(require("util"));
+const error_1 = require("../error");
+const guard_1 = require("../utils/guard");
+const types_1 = require("../utils/types");
+const base_1 = require("./base");
+const key_1 = require("./key");
+const role_1 = require("./role");
+/**
+ * A container for the signed part of root metadata.
+ *
+ * The top-level role and metadata file signed by the root keys.
+ * This role specifies trusted keys for all other top-level roles, which may further delegate trust.
+ */
+class Root extends base_1.Signed {
+    constructor(options) {
+        super(options);
+        this.type = types_1.MetadataKind.Root;
+        this.keys = options.keys || {};
+        this.consistentSnapshot = options.consistentSnapshot ?? true;
+        if (!options.roles) {
+            this.roles = role_1.TOP_LEVEL_ROLE_NAMES.reduce((acc, role) => ({
+                ...acc,
+                [role]: new role_1.Role({ keyIDs: [], threshold: 1 }),
+            }), {});
+        }
+        else {
+            const roleNames = new Set(Object.keys(options.roles));
+            if (!role_1.TOP_LEVEL_ROLE_NAMES.every((role) => roleNames.has(role))) {
+                throw new error_1.ValueError('missing top-level role');
+            }
+            this.roles = options.roles;
+        }
+    }
+    equals(other) {
+        if (!(other instanceof Root)) {
+            return false;
+        }
+        return (super.equals(other) &&
+            this.consistentSnapshot === other.consistentSnapshot &&
+            util_1.default.isDeepStrictEqual(this.keys, other.keys) &&
+            util_1.default.isDeepStrictEqual(this.roles, other.roles));
+    }
+    toJSON() {
+        return {
+            spec_version: this.specVersion,
+            version: this.version,
+            expires: this.expires,
+            keys: keysToJSON(this.keys),
+            roles: rolesToJSON(this.roles),
+            consistent_snapshot: this.consistentSnapshot,
+            ...this.unrecognizedFields,
+        };
+    }
+    static fromJSON(data) {
+        const { unrecognizedFields, ...commonFields } = base_1.Signed.commonFieldsFromJSON(data);
+        const { keys, roles, consistent_snapshot, ...rest } = unrecognizedFields;
+        if (typeof consistent_snapshot !== 'boolean') {
+            throw new TypeError('consistent_snapshot must be a boolean');
+        }
+        return new Root({
+            ...commonFields,
+            keys: keysFromJSON(keys),
+            roles: rolesFromJSON(roles),
+            consistentSnapshot: consistent_snapshot,
+            unrecognizedFields: rest,
+        });
+    }
+}
+exports.Root = Root;
+function keysToJSON(keys) {
+    return Object.entries(keys).reduce((acc, [keyID, key]) => ({ ...acc, [keyID]: key.toJSON() }), {});
+}
+function rolesToJSON(roles) {
+    return Object.entries(roles).reduce((acc, [roleName, role]) => ({ ...acc, [roleName]: role.toJSON() }), {});
+}
+function keysFromJSON(data) {
+    let keys;
+    if ((0, guard_1.isDefined)(data)) {
+        if (!(0, guard_1.isObjectRecord)(data)) {
+            throw new TypeError('keys must be an object');
+        }
+        keys = Object.entries(data).reduce((acc, [keyID, keyData]) => ({
+            ...acc,
+            [keyID]: key_1.Key.fromJSON(keyID, keyData),
+        }), {});
+    }
+    return keys;
+}
+function rolesFromJSON(data) {
+    let roles;
+    if ((0, guard_1.isDefined)(data)) {
+        if (!(0, guard_1.isObjectRecord)(data)) {
+            throw new TypeError('roles must be an object');
+        }
+        roles = Object.entries(data).reduce((acc, [roleName, roleData]) => ({
+            ...acc,
+            [roleName]: role_1.Role.fromJSON(roleData),
+        }), {});
+    }
+    return roles;
+}
diff --git a/node_modules/tuf-js/dist/models/signature.d.ts b/node_modules/tuf-js/dist/models/signature.d.ts
new file mode 100644
index 0000000000000..1d78e2d8e55d0
--- /dev/null
+++ b/node_modules/tuf-js/dist/models/signature.d.ts
@@ -0,0 +1,19 @@
+import { JSONObject } from '../utils/types';
+export interface SignatureOptions {
+    keyID: string;
+    sig: string;
+}
+/**
+ * A container class containing information about a signature.
+ *
+ * Contains a signature and the keyid uniquely identifying the key used
+ * to generate the signature.
+ *
+ * Provide a `fromJSON` method to create a Signature from a JSON object.
+ */
+export declare class Signature {
+    readonly keyID: string;
+    readonly sig: string;
+    constructor(options: SignatureOptions);
+    static fromJSON(data: JSONObject): Signature;
+}
diff --git a/node_modules/tuf-js/dist/models/signature.js b/node_modules/tuf-js/dist/models/signature.js
new file mode 100644
index 0000000000000..9550fa7b551fc
--- /dev/null
+++ b/node_modules/tuf-js/dist/models/signature.js
@@ -0,0 +1,32 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Signature = void 0;
+/**
+ * A container class containing information about a signature.
+ *
+ * Contains a signature and the keyid uniquely identifying the key used
+ * to generate the signature.
+ *
+ * Provide a `fromJSON` method to create a Signature from a JSON object.
+ */
+class Signature {
+    constructor(options) {
+        const { keyID, sig } = options;
+        this.keyID = keyID;
+        this.sig = sig;
+    }
+    static fromJSON(data) {
+        const { keyid, sig } = data;
+        if (typeof keyid !== 'string') {
+            throw new TypeError('keyid must be a string');
+        }
+        if (typeof sig !== 'string') {
+            throw new TypeError('sig must be a string');
+        }
+        return new Signature({
+            keyID: keyid,
+            sig: sig,
+        });
+    }
+}
+exports.Signature = Signature;
diff --git a/node_modules/tuf-js/dist/models/snapshot.d.ts b/node_modules/tuf-js/dist/models/snapshot.d.ts
new file mode 100644
index 0000000000000..79bc07359509b
--- /dev/null
+++ b/node_modules/tuf-js/dist/models/snapshot.d.ts
@@ -0,0 +1,23 @@
+import { JSONObject, MetadataKind } from '../utils/types';
+import { Signed, SignedOptions } from './base';
+import { MetaFile } from './file';
+type MetaFileMap = Record<string, MetaFile>;
+export interface SnapshotOptions extends SignedOptions {
+    meta?: MetaFileMap;
+}
+/**
+ * A container for the signed part of snapshot metadata.
+ *
+ * Snapshot contains information about all target Metadata files.
+ * A top-level role that specifies the latest versions of all targets metadata files,
+ * and hence the latest versions of all targets (including any dependencies between them) on the repository.
+ */
+export declare class Snapshot extends Signed {
+    readonly type = MetadataKind.Snapshot;
+    readonly meta: MetaFileMap;
+    constructor(opts: SnapshotOptions);
+    equals(other: Snapshot): boolean;
+    toJSON(): JSONObject;
+    static fromJSON(data: JSONObject): Snapshot;
+}
+export {};
diff --git a/node_modules/tuf-js/dist/models/snapshot.js b/node_modules/tuf-js/dist/models/snapshot.js
new file mode 100644
index 0000000000000..0945a28cd03cc
--- /dev/null
+++ b/node_modules/tuf-js/dist/models/snapshot.js
@@ -0,0 +1,71 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Snapshot = void 0;
+const util_1 = __importDefault(require("util"));
+const guard_1 = require("../utils/guard");
+const types_1 = require("../utils/types");
+const base_1 = require("./base");
+const file_1 = require("./file");
+/**
+ * A container for the signed part of snapshot metadata.
+ *
+ * Snapshot contains information about all target Metadata files.
+ * A top-level role that specifies the latest versions of all targets metadata files,
+ * and hence the latest versions of all targets (including any dependencies between them) on the repository.
+ */
+class Snapshot extends base_1.Signed {
+    constructor(opts) {
+        super(opts);
+        this.type = types_1.MetadataKind.Snapshot;
+        this.meta = opts.meta || { 'targets.json': new file_1.MetaFile({ version: 1 }) };
+    }
+    equals(other) {
+        if (!(other instanceof Snapshot)) {
+            return false;
+        }
+        return super.equals(other) && util_1.default.isDeepStrictEqual(this.meta, other.meta);
+    }
+    toJSON() {
+        return {
+            meta: metaToJSON(this.meta),
+            spec_version: this.specVersion,
+            version: this.version,
+            expires: this.expires,
+            ...this.unrecognizedFields,
+        };
+    }
+    static fromJSON(data) {
+        const { unrecognizedFields, ...commonFields } = base_1.Signed.commonFieldsFromJSON(data);
+        const { meta, ...rest } = unrecognizedFields;
+        return new Snapshot({
+            ...commonFields,
+            meta: metaFromJSON(meta),
+            unrecognizedFields: rest,
+        });
+    }
+}
+exports.Snapshot = Snapshot;
+function metaToJSON(meta) {
+    return Object.entries(meta).reduce((acc, [path, metadata]) => ({
+        ...acc,
+        [path]: metadata.toJSON(),
+    }), {});
+}
+function metaFromJSON(data) {
+    let meta;
+    if ((0, guard_1.isDefined)(data)) {
+        if (!(0, guard_1.isObjectRecord)(data)) {
+            throw new TypeError('meta field is malformed');
+        }
+        else {
+            meta = Object.entries(data).reduce((acc, [path, metadata]) => ({
+                ...acc,
+                [path]: file_1.MetaFile.fromJSON(metadata),
+            }), {});
+        }
+        return meta;
+    }
+}
diff --git a/node_modules/tuf-js/dist/models/targets.d.ts b/node_modules/tuf-js/dist/models/targets.d.ts
new file mode 100644
index 0000000000000..24dba9ac71580
--- /dev/null
+++ b/node_modules/tuf-js/dist/models/targets.d.ts
@@ -0,0 +1,19 @@
+import { JSONObject, MetadataKind } from '../utils/types';
+import { Signed, SignedOptions } from './base';
+import { Delegations } from './delegations';
+import { TargetFile } from './file';
+type TargetFileMap = Record<string, TargetFile>;
+interface TargetsOptions extends SignedOptions {
+    targets?: TargetFileMap;
+    delegations?: Delegations;
+}
+export declare class Targets extends Signed {
+    readonly type = MetadataKind.Targets;
+    readonly targets: TargetFileMap;
+    readonly delegations?: Delegations;
+    constructor(options: TargetsOptions);
+    equals(other: Targets): boolean;
+    toJSON(): JSONObject;
+    static fromJSON(data: JSONObject): Targets;
+}
+export {};
diff --git a/node_modules/tuf-js/dist/models/targets.js b/node_modules/tuf-js/dist/models/targets.js
new file mode 100644
index 0000000000000..90a2528764708
--- /dev/null
+++ b/node_modules/tuf-js/dist/models/targets.js
@@ -0,0 +1,89 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Targets = void 0;
+const util_1 = __importDefault(require("util"));
+const guard_1 = require("../utils/guard");
+const types_1 = require("../utils/types");
+const base_1 = require("./base");
+const delegations_1 = require("./delegations");
+const file_1 = require("./file");
+// Container for the signed part of targets metadata.
+//
+// Targets contains verifying information about target files and also delegates
+// responsibility to other Targets roles.
+class Targets extends base_1.Signed {
+    constructor(options) {
+        super(options);
+        this.type = types_1.MetadataKind.Targets;
+        this.targets = options.targets || {};
+        this.delegations = options.delegations;
+    }
+    equals(other) {
+        if (!(other instanceof Targets)) {
+            return false;
+        }
+        return (super.equals(other) &&
+            util_1.default.isDeepStrictEqual(this.targets, other.targets) &&
+            util_1.default.isDeepStrictEqual(this.delegations, other.delegations));
+    }
+    toJSON() {
+        const json = {
+            spec_version: this.specVersion,
+            version: this.version,
+            expires: this.expires,
+            targets: targetsToJSON(this.targets),
+            ...this.unrecognizedFields,
+        };
+        if (this.delegations) {
+            json.delegations = this.delegations.toJSON();
+        }
+        return json;
+    }
+    static fromJSON(data) {
+        const { unrecognizedFields, ...commonFields } = base_1.Signed.commonFieldsFromJSON(data);
+        const { targets, delegations, ...rest } = unrecognizedFields;
+        return new Targets({
+            ...commonFields,
+            targets: targetsFromJSON(targets),
+            delegations: delegationsFromJSON(delegations),
+            unrecognizedFields: rest,
+        });
+    }
+}
+exports.Targets = Targets;
+function targetsToJSON(targets) {
+    return Object.entries(targets).reduce((acc, [path, target]) => ({
+        ...acc,
+        [path]: target.toJSON(),
+    }), {});
+}
+function targetsFromJSON(data) {
+    let targets;
+    if ((0, guard_1.isDefined)(data)) {
+        if (!(0, guard_1.isObjectRecord)(data)) {
+            throw new TypeError('targets must be an object');
+        }
+        else {
+            targets = Object.entries(data).reduce((acc, [path, target]) => ({
+                ...acc,
+                [path]: file_1.TargetFile.fromJSON(path, target),
+            }), {});
+        }
+    }
+    return targets;
+}
+function delegationsFromJSON(data) {
+    let delegations;
+    if ((0, guard_1.isDefined)(data)) {
+        if (!(0, guard_1.isObject)(data)) {
+            throw new TypeError('delegations must be an object');
+        }
+        else {
+            delegations = delegations_1.Delegations.fromJSON(data);
+        }
+    }
+    return delegations;
+}
diff --git a/node_modules/tuf-js/dist/models/timestamp.d.ts b/node_modules/tuf-js/dist/models/timestamp.d.ts
new file mode 100644
index 0000000000000..481ada8e238d5
--- /dev/null
+++ b/node_modules/tuf-js/dist/models/timestamp.d.ts
@@ -0,0 +1,21 @@
+import { JSONObject, MetadataKind } from '../utils/types';
+import { Signed, SignedOptions } from './base';
+import { MetaFile } from './file';
+interface TimestampOptions extends SignedOptions {
+    snapshotMeta?: MetaFile;
+}
+/**
+ * A container for the signed part of timestamp metadata.
+ *
+ * A top-level role that specifies the latest version of the snapshot role metadata file,
+ * and hence the latest versions of all metadata and targets on the repository.
+ */
+export declare class Timestamp extends Signed {
+    readonly type = MetadataKind.Timestamp;
+    readonly snapshotMeta: MetaFile;
+    constructor(options: TimestampOptions);
+    equals(other: Timestamp): boolean;
+    toJSON(): JSONObject;
+    static fromJSON(data: JSONObject): Timestamp;
+}
+export {};
diff --git a/node_modules/tuf-js/dist/models/timestamp.js b/node_modules/tuf-js/dist/models/timestamp.js
new file mode 100644
index 0000000000000..84f681b68d16a
--- /dev/null
+++ b/node_modules/tuf-js/dist/models/timestamp.js
@@ -0,0 +1,58 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Timestamp = void 0;
+const guard_1 = require("../utils/guard");
+const types_1 = require("../utils/types");
+const base_1 = require("./base");
+const file_1 = require("./file");
+/**
+ * A container for the signed part of timestamp metadata.
+ *
+ * A top-level role that specifies the latest version of the snapshot role metadata file,
+ * and hence the latest versions of all metadata and targets on the repository.
+ */
+class Timestamp extends base_1.Signed {
+    constructor(options) {
+        super(options);
+        this.type = types_1.MetadataKind.Timestamp;
+        this.snapshotMeta = options.snapshotMeta || new file_1.MetaFile({ version: 1 });
+    }
+    equals(other) {
+        if (!(other instanceof Timestamp)) {
+            return false;
+        }
+        return super.equals(other) && this.snapshotMeta.equals(other.snapshotMeta);
+    }
+    toJSON() {
+        return {
+            spec_version: this.specVersion,
+            version: this.version,
+            expires: this.expires,
+            meta: { 'snapshot.json': this.snapshotMeta.toJSON() },
+            ...this.unrecognizedFields,
+        };
+    }
+    static fromJSON(data) {
+        const { unrecognizedFields, ...commonFields } = base_1.Signed.commonFieldsFromJSON(data);
+        const { meta, ...rest } = unrecognizedFields;
+        return new Timestamp({
+            ...commonFields,
+            snapshotMeta: snapshotMetaFromJSON(meta),
+            unrecognizedFields: rest,
+        });
+    }
+}
+exports.Timestamp = Timestamp;
+function snapshotMetaFromJSON(data) {
+    let snapshotMeta;
+    if ((0, guard_1.isDefined)(data)) {
+        const snapshotData = data['snapshot.json'];
+        if (!(0, guard_1.isDefined)(snapshotData) || !(0, guard_1.isObject)(snapshotData)) {
+            throw new TypeError('missing snapshot.json in meta');
+        }
+        else {
+            snapshotMeta = file_1.MetaFile.fromJSON(snapshotData);
+        }
+    }
+    return snapshotMeta;
+}
diff --git a/node_modules/tuf-js/dist/store.d.ts b/node_modules/tuf-js/dist/store.d.ts
new file mode 100644
index 0000000000000..a6e20ae559c8b
--- /dev/null
+++ b/node_modules/tuf-js/dist/store.d.ts
@@ -0,0 +1,19 @@
+/// <reference types="node" />
+import { Metadata, Root, Snapshot, Targets, Timestamp } from './models';
+export declare class TrustedMetadataStore {
+    private trustedSet;
+    private referenceTime;
+    constructor(rootData: Buffer);
+    get root(): Metadata<Root>;
+    get timestamp(): Metadata<Timestamp> | undefined;
+    get snapshot(): Metadata<Snapshot> | undefined;
+    get targets(): Metadata<Targets> | undefined;
+    getRole(name: string): Metadata<Targets> | undefined;
+    updateRoot(bytesBuffer: Buffer): Metadata<Root>;
+    updateTimestamp(bytesBuffer: Buffer): Metadata<Timestamp>;
+    updateSnapshot(bytesBuffer: Buffer, trusted?: boolean): Metadata<Snapshot>;
+    updateDelegatedTargets(bytesBuffer: Buffer, roleName: string, delegatorName: string): void;
+    private loadTrustedRoot;
+    private checkFinalTimestamp;
+    private checkFinalSnapsnot;
+}
diff --git a/node_modules/tuf-js/dist/store.js b/node_modules/tuf-js/dist/store.js
new file mode 100644
index 0000000000000..351a1961730bc
--- /dev/null
+++ b/node_modules/tuf-js/dist/store.js
@@ -0,0 +1,209 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TrustedMetadataStore = void 0;
+const error_1 = require("./error");
+const models_1 = require("./models");
+const types_1 = require("./utils/types");
+// In-memory store of verified TUF metadata. Enforces the ordering of the TUF
+// client update workflow (root -> timestamp -> snapshot -> targets): every
+// piece of metadata admitted to `trustedSet` has had its signatures verified
+// by the appropriate delegator first.
+class TrustedMetadataStore {
+    // rootData: Buffer holding the initial (locally trusted) root.json bytes.
+    constructor(rootData) {
+        this.trustedSet = {};
+        // Client workflow 5.1: record fixed update start time
+        this.referenceTime = new Date();
+        // Client workflow 5.2: load trusted root metadata
+        this.loadTrustedRoot(rootData);
+    }
+    // Trusted root metadata; always set by the constructor, so the throw is a
+    // defensive guard against internal misuse.
+    get root() {
+        if (!this.trustedSet.root) {
+            throw new ReferenceError('No trusted root metadata');
+        }
+        return this.trustedSet.root;
+    }
+    // Trusted timestamp metadata, or undefined if not yet loaded.
+    get timestamp() {
+        return this.trustedSet.timestamp;
+    }
+    // Trusted snapshot metadata, or undefined if not yet loaded.
+    get snapshot() {
+        return this.trustedSet.snapshot;
+    }
+    // Trusted top-level targets metadata, or undefined if not yet loaded.
+    get targets() {
+        return this.trustedSet.targets;
+    }
+    // Generic lookup of any trusted role by name (including delegated targets).
+    getRole(name) {
+        return this.trustedSet[name];
+    }
+    // Verifies `bytesBuffer` as the next sequential version of root metadata
+    // and, on success, installs it as the new trusted root. Returns it.
+    updateRoot(bytesBuffer) {
+        const data = JSON.parse(bytesBuffer.toString('utf8'));
+        const newRoot = models_1.Metadata.fromJSON(types_1.MetadataKind.Root, data);
+        if (newRoot.signed.type != types_1.MetadataKind.Root) {
+            throw new error_1.RepositoryError(`Expected 'root', got ${newRoot.signed.type}`);
+        }
+        // Client workflow 5.4: check for arbitrary software attack
+        this.root.verifyDelegate(types_1.MetadataKind.Root, newRoot);
+        // Client workflow 5.5: check for rollback attack
+        if (newRoot.signed.version != this.root.signed.version + 1) {
+            throw new error_1.BadVersionError(`Expected version ${this.root.signed.version + 1}, got ${newRoot.signed.version}`);
+        }
+        // Check that new root is signed by self
+        newRoot.verifyDelegate(types_1.MetadataKind.Root, newRoot);
+        // Client workflow 5.7: set new root as trusted root
+        this.trustedSet.root = newRoot;
+        return newRoot;
+    }
+    // Verifies `bytesBuffer` as timestamp metadata and installs it. Throws
+    // EqualVersionError when the new version equals the current one (callers
+    // treat that as "keep the old one", not as a failure).
+    updateTimestamp(bytesBuffer) {
+        if (this.snapshot) {
+            throw new error_1.RuntimeError('Cannot update timestamp after snapshot');
+        }
+        if (this.root.signed.isExpired(this.referenceTime)) {
+            throw new error_1.ExpiredMetadataError('Final root.json is expired');
+        }
+        const data = JSON.parse(bytesBuffer.toString('utf8'));
+        const newTimestamp = models_1.Metadata.fromJSON(types_1.MetadataKind.Timestamp, data);
+        if (newTimestamp.signed.type != types_1.MetadataKind.Timestamp) {
+            throw new error_1.RepositoryError(`Expected 'timestamp', got ${newTimestamp.signed.type}`);
+        }
+        // Client workflow 5.4.2: check for arbitrary software attack
+        this.root.verifyDelegate(types_1.MetadataKind.Timestamp, newTimestamp);
+        if (this.timestamp) {
+            // Prevent rolling back timestamp version
+            // Client workflow 5.4.3.1: check for rollback attack
+            if (newTimestamp.signed.version < this.timestamp.signed.version) {
+                throw new error_1.BadVersionError(`New timestamp version ${newTimestamp.signed.version} is less than current version ${this.timestamp.signed.version}`);
+            }
+            //  Keep using old timestamp if versions are equal.
+            if (newTimestamp.signed.version === this.timestamp.signed.version) {
+                throw new error_1.EqualVersionError(`New timestamp version ${newTimestamp.signed.version} is equal to current version ${this.timestamp.signed.version}`);
+            }
+            // Prevent rolling back snapshot version
+            // Client workflow 5.4.3.2: check for rollback attack
+            const snapshotMeta = this.timestamp.signed.snapshotMeta;
+            const newSnapshotMeta = newTimestamp.signed.snapshotMeta;
+            if (newSnapshotMeta.version < snapshotMeta.version) {
+                throw new error_1.BadVersionError(`New snapshot version ${newSnapshotMeta.version} is less than current version ${snapshotMeta.version}`);
+            }
+        }
+        // expiry not checked to allow old timestamp to be used for rollback
+        // protection of new timestamp: expiry is checked in update_snapshot
+        this.trustedSet.timestamp = newTimestamp;
+        // Client workflow 5.4.4: check for freeze attack
+        this.checkFinalTimestamp();
+        return newTimestamp;
+    }
+    // Verifies `bytesBuffer` as snapshot metadata and installs it. Pass
+    // trusted=true only for data already verified on a previous run (local
+    // cache), which skips the hash check against timestamp's snapshotMeta.
+    updateSnapshot(bytesBuffer, trusted = false) {
+        if (!this.timestamp) {
+            throw new error_1.RuntimeError('Cannot update snapshot before timestamp');
+        }
+        if (this.targets) {
+            throw new error_1.RuntimeError('Cannot update snapshot after targets');
+        }
+        // Snapshot cannot be loaded if final timestamp is expired
+        this.checkFinalTimestamp();
+        const snapshotMeta = this.timestamp.signed.snapshotMeta;
+        // Verify non-trusted data against the hashes in timestamp, if any.
+        // Trusted snapshot data has already been verified once.
+        // Client workflow 5.5.2: check against timestamp role's snapshot hash
+        if (!trusted) {
+            snapshotMeta.verify(bytesBuffer);
+        }
+        const data = JSON.parse(bytesBuffer.toString('utf8'));
+        const newSnapshot = models_1.Metadata.fromJSON(types_1.MetadataKind.Snapshot, data);
+        if (newSnapshot.signed.type != types_1.MetadataKind.Snapshot) {
+            throw new error_1.RepositoryError(`Expected 'snapshot', got ${newSnapshot.signed.type}`);
+        }
+        // Client workflow 5.5.3: check for arbitrary software attack
+        this.root.verifyDelegate(types_1.MetadataKind.Snapshot, newSnapshot);
+        // version check against meta version (5.5.4) is deferred to allow old
+        // snapshot to be used in rollback protection
+        // Client workflow 5.5.5: check for rollback attack
+        if (this.snapshot) {
+            Object.entries(this.snapshot.signed.meta).forEach(([fileName, fileInfo]) => {
+                const newFileInfo = newSnapshot.signed.meta[fileName];
+                if (!newFileInfo) {
+                    throw new error_1.RepositoryError(`Missing file ${fileName} in new snapshot`);
+                }
+                if (newFileInfo.version < fileInfo.version) {
+                    throw new error_1.BadVersionError(`New version ${newFileInfo.version} of ${fileName} is less than current version ${fileInfo.version}`);
+                }
+            });
+        }
+        this.trustedSet.snapshot = newSnapshot;
+        // snapshot is loaded, but we raise if it's not valid _final_ snapshot
+        // Client workflow 5.5.4 & 5.5.6
+        this.checkFinalSnapsnot();
+        return newSnapshot;
+    }
+    // Verifies `bytesBuffer` as (possibly delegated) targets metadata for
+    // `roleName`, delegated by the already-trusted role `delegatorName`, and
+    // installs it into the trusted set under `roleName`.
+    updateDelegatedTargets(bytesBuffer, roleName, delegatorName) {
+        if (!this.snapshot) {
+            throw new error_1.RuntimeError('Cannot update delegated targets before snapshot');
+        }
+        // Targets cannot be loaded if final snapshot is expired or its version
+        // does not match meta version in timestamp.
+        this.checkFinalSnapsnot();
+        const delegator = this.trustedSet[delegatorName];
+        if (!delegator) {
+            throw new error_1.RuntimeError(`No trusted ${delegatorName} metadata`);
+        }
+        // Extract metadata for the delegated role from snapshot
+        const meta = this.snapshot.signed.meta?.[`${roleName}.json`];
+        if (!meta) {
+            throw new error_1.RepositoryError(`Missing ${roleName}.json in snapshot`);
+        }
+        // Client workflow 5.6.2: check against snapshot role's targets hash
+        meta.verify(bytesBuffer);
+        const data = JSON.parse(bytesBuffer.toString('utf8'));
+        const newDelegate = models_1.Metadata.fromJSON(types_1.MetadataKind.Targets, data);
+        if (newDelegate.signed.type != types_1.MetadataKind.Targets) {
+            throw new error_1.RepositoryError(`Expected 'targets', got ${newDelegate.signed.type}`);
+        }
+        // Client workflow 5.6.3: check for arbitrary software attack
+        delegator.verifyDelegate(roleName, newDelegate);
+        // Client workflow 5.6.4: Check against snapshot role’s targets version
+        const version = newDelegate.signed.version;
+        if (version != meta.version) {
+            throw new error_1.BadVersionError(`Version ${version} of ${roleName} does not match snapshot version ${meta.version}`);
+        }
+        // Client workflow 5.6.5: check for a freeze attack
+        if (newDelegate.signed.isExpired(this.referenceTime)) {
+            throw new error_1.ExpiredMetadataError(`${roleName}.json is expired`);
+        }
+        this.trustedSet[roleName] = newDelegate;
+    }
+    // Verifies and loads data as trusted root metadata.
+    // Note that an expired initial root is still considered valid.
+    loadTrustedRoot(bytesBuffer) {
+        const data = JSON.parse(bytesBuffer.toString('utf8'));
+        const root = models_1.Metadata.fromJSON(types_1.MetadataKind.Root, data);
+        if (root.signed.type != types_1.MetadataKind.Root) {
+            throw new error_1.RepositoryError(`Expected 'root', got ${root.signed.type}`);
+        }
+        // Root must be self-signed before it is trusted.
+        root.verifyDelegate(types_1.MetadataKind.Root, root);
+        this.trustedSet['root'] = root;
+    }
+    // Raises unless a non-expired timestamp has been loaded.
+    checkFinalTimestamp() {
+        // Timestamp MUST be loaded
+        if (!this.timestamp) {
+            throw new ReferenceError('No trusted timestamp metadata');
+        }
+        // Client workflow 5.4.4: check for freeze attack
+        if (this.timestamp.signed.isExpired(this.referenceTime)) {
+            throw new error_1.ExpiredMetadataError('Final timestamp.json is expired');
+        }
+    }
+    // Raises unless a non-expired snapshot matching the version pinned by the
+    // timestamp has been loaded. NOTE: the 'Snapsnot' misspelling is kept, as
+    // the name is referenced throughout this file and in the published
+    // store.d.ts declarations.
+    checkFinalSnapsnot() {
+        // Snapshot and timestamp MUST be loaded
+        if (!this.snapshot) {
+            throw new ReferenceError('No trusted snapshot metadata');
+        }
+        if (!this.timestamp) {
+            throw new ReferenceError('No trusted timestamp metadata');
+        }
+        // Client workflow 5.5.6: check for freeze attack
+        if (this.snapshot.signed.isExpired(this.referenceTime)) {
+            throw new error_1.ExpiredMetadataError('snapshot.json is expired');
+        }
+        // Client workflow 5.5.4: check against timestamp role’s snapshot version
+        const snapshotMeta = this.timestamp.signed.snapshotMeta;
+        if (this.snapshot.signed.version !== snapshotMeta.version) {
+            throw new error_1.BadVersionError("Snapshot version doesn't match timestamp");
+        }
+    }
+}
+exports.TrustedMetadataStore = TrustedMetadataStore;
diff --git a/node_modules/tuf-js/dist/updater.d.ts b/node_modules/tuf-js/dist/updater.d.ts
new file mode 100644
index 0000000000000..e49dca22a43d3
--- /dev/null
+++ b/node_modules/tuf-js/dist/updater.d.ts
@@ -0,0 +1,33 @@
+import { BaseFetcher } from './fetcher';
+import { TargetFile } from './models/file';
+import { Config } from './utils/config';
+/** Options accepted by the Updater constructor. */
+export interface UpdaterOptions {
+    metadataDir: string;
+    metadataBaseUrl: string;
+    targetDir?: string;
+    targetBaseUrl?: string;
+    fetcher?: BaseFetcher;
+    config?: Partial<Config>;
+}
+/**
+ * High-level TUF client: refreshes the trusted metadata set from a remote
+ * repository and downloads/verifies target files against that metadata.
+ */
+export declare class Updater {
+    private dir;
+    private metadataBaseUrl;
+    private targetDir?;
+    private targetBaseUrl?;
+    private trustedSet;
+    private config;
+    private fetcher;
+    constructor(options: UpdaterOptions);
+    refresh(): Promise<void>;
+    getTargetInfo(targetPath: string): Promise<TargetFile | undefined>;
+    downloadTarget(targetInfo: TargetFile, filePath?: string, targetBaseUrl?: string): Promise<string>;
+    findCachedTarget(targetInfo: TargetFile, filePath?: string): Promise<string | undefined>;
+    private loadLocalMetadata;
+    private loadRoot;
+    private loadTimestamp;
+    private loadSnapshot;
+    private loadTargets;
+    private preorderDepthFirstWalk;
+    private generateTargetPath;
+    private persistMetadata;
+}
diff --git a/node_modules/tuf-js/dist/updater.js b/node_modules/tuf-js/dist/updater.js
new file mode 100644
index 0000000000000..9f33c667ce9b2
--- /dev/null
+++ b/node_modules/tuf-js/dist/updater.js
@@ -0,0 +1,306 @@
+"use strict";
+// TypeScript-compiler-generated CommonJS interop helpers used to implement
+// the `import * as ns from '...'` syntax below. Do not hand-edit.
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Updater = void 0;
+const fs = __importStar(require("fs"));
+const path = __importStar(require("path"));
+const error_1 = require("./error");
+const fetcher_1 = require("./fetcher");
+const store_1 = require("./store");
+const config_1 = require("./utils/config");
+const types_1 = require("./utils/types");
+class Updater {
+    constructor(options) {
+        const { metadataDir, metadataBaseUrl, targetDir, targetBaseUrl, fetcher, config, } = options;
+        this.dir = metadataDir;
+        this.metadataBaseUrl = metadataBaseUrl;
+        this.targetDir = targetDir;
+        this.targetBaseUrl = targetBaseUrl;
+        const data = this.loadLocalMetadata(types_1.MetadataKind.Root);
+        this.trustedSet = new store_1.TrustedMetadataStore(data);
+        this.config = { ...config_1.defaultConfig, ...config };
+        this.fetcher =
+            fetcher ||
+                new fetcher_1.Fetcher({
+                    timeout: this.config.fetchTimeout,
+                    retries: this.config.fetchRetries,
+                });
+    }
+    async refresh() {
+        await this.loadRoot();
+        await this.loadTimestamp();
+        await this.loadSnapshot();
+        await this.loadTargets(types_1.MetadataKind.Targets, types_1.MetadataKind.Root);
+    }
+    // Returns the TargetFile instance with information for the given target path.
+    //
+    // Implicitly calls refresh if it hasn't already been called.
+    async getTargetInfo(targetPath) {
+        if (!this.trustedSet.targets) {
+            await this.refresh();
+        }
+        return this.preorderDepthFirstWalk(targetPath);
+    }
+    async downloadTarget(targetInfo, filePath, targetBaseUrl) {
+        const targetPath = filePath || this.generateTargetPath(targetInfo);
+        if (!targetBaseUrl) {
+            if (!this.targetBaseUrl) {
+                throw new error_1.ValueError('Target base URL not set');
+            }
+            targetBaseUrl = this.targetBaseUrl;
+        }
+        let targetFilePath = targetInfo.path;
+        const consistentSnapshot = this.trustedSet.root.signed.consistentSnapshot;
+        if (consistentSnapshot && this.config.prefixTargetsWithHash) {
+            const hashes = Object.values(targetInfo.hashes);
+            const basename = path.basename(targetFilePath);
+            targetFilePath = `${hashes[0]}.${basename}`;
+        }
+        const url = path.join(targetBaseUrl, targetFilePath);
+        // Client workflow 5.7.3: download target file
+        await this.fetcher.downloadFile(url, targetInfo.length, async (fileName) => {
+            // Verify hashes and length of downloaded file
+            await targetInfo.verify(fs.createReadStream(fileName));
+            // Copy file to target path
+            fs.copyFileSync(fileName, targetPath);
+        });
+        return targetPath;
+    }
+    async findCachedTarget(targetInfo, filePath) {
+        if (!filePath) {
+            filePath = this.generateTargetPath(targetInfo);
+        }
+        try {
+            if (fs.existsSync(filePath)) {
+                targetInfo.verify(fs.createReadStream(filePath));
+                return filePath;
+            }
+        }
+        catch (error) {
+            return; // File not found
+        }
+        return; // File not found
+    }
+    loadLocalMetadata(fileName) {
+        const filePath = path.join(this.dir, `${fileName}.json`);
+        return fs.readFileSync(filePath);
+    }
+    // Sequentially load and persist on local disk every newer root metadata
+    // version available on the remote.
+    // Client workflow 5.3: update root role
+    async loadRoot() {
+        // Client workflow 5.3.2: version of trusted root metadata file
+        const rootVersion = this.trustedSet.root.signed.version;
+        const lowerBound = rootVersion + 1;
+        const upperBound = lowerBound + this.config.maxRootRotations;
+        for (let version = lowerBound; version <= upperBound; version++) {
+            const url = path.join(this.metadataBaseUrl, `${version}.root.json`);
+            try {
+                // Client workflow 5.3.3: download new root metadata file
+                const bytesData = await this.fetcher.downloadBytes(url, this.config.rootMaxLength);
+                // Client workflow 5.3.4 - 5.4.7
+                this.trustedSet.updateRoot(bytesData);
+                // Client workflow 5.3.8: persist root metadata file
+                this.persistMetadata(types_1.MetadataKind.Root, bytesData);
+            }
+            catch (error) {
+                break;
+            }
+        }
+    }
+    // Load local and remote timestamp metadata.
+    // Client workflow 5.4: update timestamp role
+    async loadTimestamp() {
+        // Load local and remote timestamp metadata
+        try {
+            const data = this.loadLocalMetadata(types_1.MetadataKind.Timestamp);
+            this.trustedSet.updateTimestamp(data);
+        }
+        catch (error) {
+            // continue
+        }
+        //Load from remote (whether local load succeeded or not)
+        const url = path.join(this.metadataBaseUrl, `timestamp.json`);
+        // Client workflow 5.4.1: download timestamp metadata file
+        const bytesData = await this.fetcher.downloadBytes(url, this.config.timestampMaxLength);
+        try {
+            // Client workflow 5.4.2 - 5.4.4
+            this.trustedSet.updateTimestamp(bytesData);
+        }
+        catch (error) {
+            // If new timestamp version is same as current, discardd the new one.
+            // This is normal and should NOT raise an error.
+            if (error instanceof error_1.EqualVersionError) {
+                return;
+            }
+            // Re-raise any other error
+            throw error;
+        }
+        // Client workflow 5.4.5: persist timestamp metadata
+        this.persistMetadata(types_1.MetadataKind.Timestamp, bytesData);
+    }
+    // Load local and remote snapshot metadata.
+    // Client workflow 5.5: update snapshot role
+    async loadSnapshot() {
+        //Load local (and if needed remote) snapshot metadata
+        try {
+            const data = this.loadLocalMetadata(types_1.MetadataKind.Snapshot);
+            this.trustedSet.updateSnapshot(data, true);
+        }
+        catch (error) {
+            if (!this.trustedSet.timestamp) {
+                throw new ReferenceError('No timestamp metadata');
+            }
+            const snapshotMeta = this.trustedSet.timestamp.signed.snapshotMeta;
+            const maxLength = snapshotMeta.length || this.config.snapshotMaxLength;
+            const version = this.trustedSet.root.signed.consistentSnapshot
+                ? snapshotMeta.version
+                : undefined;
+            const url = path.join(this.metadataBaseUrl, version ? `${version}.snapshot.json` : `snapshot.json`);
+            try {
+                // Client workflow 5.5.1: download snapshot metadata file
+                const bytesData = await this.fetcher.downloadBytes(url, maxLength);
+                // Client workflow 5.5.2 - 5.5.6
+                this.trustedSet.updateSnapshot(bytesData);
+                // Client workflow 5.5.7: persist snapshot metadata file
+                this.persistMetadata(types_1.MetadataKind.Snapshot, bytesData);
+            }
+            catch (error) {
+                throw new error_1.RuntimeError(`Unable to load snapshot metadata error ${error}`);
+            }
+        }
+    }
+    // Load local and remote targets metadata.
+    // Client workflow 5.6: update targets role
+    async loadTargets(role, parentRole) {
+        if (this.trustedSet.getRole(role)) {
+            return this.trustedSet.getRole(role);
+        }
+        try {
+            const buffer = this.loadLocalMetadata(role);
+            this.trustedSet.updateDelegatedTargets(buffer, role, parentRole);
+        }
+        catch (error) {
+            // Local 'role' does not exist or is invalid: update from remote
+            if (!this.trustedSet.snapshot) {
+                throw new ReferenceError('No snapshot metadata');
+            }
+            const metaInfo = this.trustedSet.snapshot.signed.meta[`${role}.json`];
+            // TODO: use length for fetching
+            const maxLength = metaInfo.length || this.config.targetsMaxLength;
+            const version = this.trustedSet.root.signed.consistentSnapshot
+                ? metaInfo.version
+                : undefined;
+            const url = path.join(this.metadataBaseUrl, version ? `${version}.${role}.json` : `${role}.json`);
+            try {
+                // Client workflow 5.6.1: download targets metadata file
+                const bytesData = await this.fetcher.downloadBytes(url, maxLength);
+                // Client workflow 5.6.2 - 5.6.6
+                this.trustedSet.updateDelegatedTargets(bytesData, role, parentRole);
+                // Client workflow 5.6.7: persist targets metadata file
+                this.persistMetadata(role, bytesData);
+            }
+            catch (error) {
+                throw new error_1.RuntimeError(`Unable to load targets error ${error}`);
+            }
+        }
+        return this.trustedSet.getRole(role);
+    }
+    async preorderDepthFirstWalk(targetPath) {
+        // Interrogates the tree of target delegations in order of appearance
+        // (which implicitly order trustworthiness), and returns the matching
+        // target found in the most trusted role.
+        // List of delegations to be interrogated. A (role, parent role) pair
+        // is needed to load and verify the delegated targets metadata.
+        const delegationsToVisit = [
+            {
+                roleName: types_1.MetadataKind.Targets,
+                parentRoleName: types_1.MetadataKind.Root,
+            },
+        ];
+        const visitedRoleNames = new Set();
+        // Client workflow 5.6.7: preorder depth-first traversal of the graph of
+        // target delegations
+        while (visitedRoleNames.size <= this.config.maxDelegations &&
+            delegationsToVisit.length > 0) {
+            //  Pop the role name from the top of the stack.
+            // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
+            const { roleName, parentRoleName } = delegationsToVisit.pop();
+            // Skip any visited current role to prevent cycles.
+            // Client workflow 5.6.7.1: skip already-visited roles
+            if (visitedRoleNames.has(roleName)) {
+                continue;
+            }
+            // The metadata for 'role_name' must be downloaded/updated before
+            // its targets, delegations, and child roles can be inspected.
+            const targets = (await this.loadTargets(roleName, parentRoleName))
+                ?.signed;
+            if (!targets) {
+                continue;
+            }
+            const target = targets.targets?.[targetPath];
+            if (target) {
+                return target;
+            }
+            // After preorder check, add current role to set of visited roles.
+            visitedRoleNames.add(roleName);
+            if (targets.delegations) {
+                const childRolesToVisit = [];
+                // NOTE: This may be a slow operation if there are many delegated roles.
+                const rolesForTarget = targets.delegations.rolesForTarget(targetPath);
+                for (const { role: childName, terminating } of rolesForTarget) {
+                    childRolesToVisit.push({
+                        roleName: childName,
+                        parentRoleName: roleName,
+                    });
+                    // Client workflow 5.6.7.2.1
+                    if (terminating) {
+                        delegationsToVisit.splice(0); // empty the array
+                        break;
+                    }
+                }
+                childRolesToVisit.reverse();
+                delegationsToVisit.push(...childRolesToVisit);
+            }
+        }
+        return; // no matching target found
+    }
+    generateTargetPath(targetInfo) {
+        if (!this.targetDir) {
+            throw new error_1.ValueError('Target directory not set');
+        }
+        return path.join(this.targetDir, targetInfo.path);
+    }
+    async persistMetadata(metaDataName, bytesData) {
+        try {
+            const filePath = path.join(this.dir, `${metaDataName}.json`);
+            fs.writeFileSync(filePath, bytesData.toString('utf8'));
+        }
+        catch (error) {
+            throw new error_1.PersistError(`Failed to persist metadata ${metaDataName} error: ${error}`);
+        }
+    }
+}
+exports.Updater = Updater;
diff --git a/node_modules/tuf-js/dist/utils/config.d.ts b/node_modules/tuf-js/dist/utils/config.d.ts
new file mode 100644
index 0000000000000..2a906c7c28d86
--- /dev/null
+++ b/node_modules/tuf-js/dist/utils/config.d.ts
@@ -0,0 +1,12 @@
+/** Default updater configuration; see config.js for the concrete values. */
+export declare const defaultConfig: {
+    maxRootRotations: number;
+    maxDelegations: number;
+    rootMaxLength: number;
+    timestampMaxLength: number;
+    snapshotMaxLength: number;
+    targetsMaxLength: number;
+    prefixTargetsWithHash: boolean;
+    fetchTimeout: number;
+    fetchRetries: number;
+};
+/** Shape of the updater configuration object. */
+export type Config = typeof defaultConfig;
diff --git a/node_modules/tuf-js/dist/utils/config.js b/node_modules/tuf-js/dist/utils/config.js
new file mode 100644
index 0000000000000..c2d970e256244
--- /dev/null
+++ b/node_modules/tuf-js/dist/utils/config.js
@@ -0,0 +1,14 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.defaultConfig = void 0;
+// Default updater configuration, overridable per-field via UpdaterOptions.config.
+exports.defaultConfig = {
+    maxRootRotations: 32, // max sequential root versions fetched per refresh
+    maxDelegations: 32, // max delegated roles visited per target lookup
+    rootMaxLength: 512000, // download size caps passed to the fetcher
+    timestampMaxLength: 16384, // (presumably bytes — TODO confirm with fetcher)
+    snapshotMaxLength: 2000000,
+    targetsMaxLength: 5000000,
+    prefixTargetsWithHash: true, // consistent-snapshot target filenames
+    fetchTimeout: 100000, // presumably milliseconds — TODO confirm with fetcher
+    fetchRetries: 2,
+};
diff --git a/node_modules/tuf-js/dist/utils/guard.d.ts b/node_modules/tuf-js/dist/utils/guard.d.ts
new file mode 100644
index 0000000000000..17bc4ce3c7ea5
--- /dev/null
+++ b/node_modules/tuf-js/dist/utils/guard.d.ts
@@ -0,0 +1,8 @@
+import { JSONObject, MetadataKind } from './types';
+/** Narrows away `undefined`. */
+export declare function isDefined<T>(val: T | undefined): val is T;
+/** True for any non-null value of type 'object'. */
+export declare function isObject(value: unknown): value is JSONObject;
+/** True for an array whose elements are all strings. */
+export declare function isStringArray(value: unknown): value is string[];
+/** True for an array whose elements are all plain objects. */
+export declare function isObjectArray(value: unknown): value is JSONObject[];
+/** True for a non-null object whose values are all strings. */
+export declare function isStringRecord(value: unknown): value is Record<string, string>;
+/** True for a non-null object whose values are all non-null objects. */
+export declare function isObjectRecord(value: unknown): value is Record<string, JSONObject>;
+/** True when value is a string member of the MetadataKind enum. */
+export declare function isMetadataKind(value: unknown): value is MetadataKind;
diff --git a/node_modules/tuf-js/dist/utils/guard.js b/node_modules/tuf-js/dist/utils/guard.js
new file mode 100644
index 0000000000000..f2207af18690a
--- /dev/null
+++ b/node_modules/tuf-js/dist/utils/guard.js
@@ -0,0 +1,39 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.isMetadataKind = exports.isObjectRecord = exports.isStringRecord = exports.isObjectArray = exports.isStringArray = exports.isObject = exports.isDefined = void 0;
+const types_1 = require("./types");
+function isDefined(val) {
+    return val !== undefined;
+}
+exports.isDefined = isDefined;
+function isObject(value) {
+    return typeof value === 'object' && value !== null;
+}
+exports.isObject = isObject;
+function isStringArray(value) {
+    return Array.isArray(value) && value.every((v) => typeof v === 'string');
+}
+exports.isStringArray = isStringArray;
+function isObjectArray(value) {
+    return Array.isArray(value) && value.every(isObject);
+}
+exports.isObjectArray = isObjectArray;
+function isStringRecord(value) {
+    return (typeof value === 'object' &&
+        value !== null &&
+        Object.keys(value).every((k) => typeof k === 'string') &&
+        Object.values(value).every((v) => typeof v === 'string'));
+}
+exports.isStringRecord = isStringRecord;
+function isObjectRecord(value) {
+    return (typeof value === 'object' &&
+        value !== null &&
+        Object.keys(value).every((k) => typeof k === 'string') &&
+        Object.values(value).every((v) => typeof v === 'object' && v !== null));
+}
+exports.isObjectRecord = isObjectRecord;
+function isMetadataKind(value) {
+    return (typeof value === 'string' &&
+        Object.values(types_1.MetadataKind).includes(value));
+}
+exports.isMetadataKind = isMetadataKind;
diff --git a/node_modules/tuf-js/dist/utils/index.d.ts b/node_modules/tuf-js/dist/utils/index.d.ts
new file mode 100644
index 0000000000000..e2232bc5cceab
--- /dev/null
+++ b/node_modules/tuf-js/dist/utils/index.d.ts
@@ -0,0 +1,5 @@
+export * as config from './config';
+export * as guard from './guard';
+export * as json from './json';
+export * as signer from './signer';
+export * as types from './types';
diff --git a/node_modules/tuf-js/dist/utils/index.js b/node_modules/tuf-js/dist/utils/index.js
new file mode 100644
index 0000000000000..604696a30565b
--- /dev/null
+++ b/node_modules/tuf-js/dist/utils/index.js
@@ -0,0 +1,31 @@
+"use strict";
+// TypeScript-compiler-generated CommonJS interop helpers used to implement
+// the `export * as ns` re-exports below. Do not hand-edit.
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.types = exports.signer = exports.json = exports.guard = exports.config = void 0;
+// Re-export each utility module under its own namespace.
+exports.config = __importStar(require("./config"));
+exports.guard = __importStar(require("./guard"));
+exports.json = __importStar(require("./json"));
+exports.signer = __importStar(require("./signer"));
+exports.types = __importStar(require("./types"));
diff --git a/node_modules/tuf-js/dist/utils/json.d.ts b/node_modules/tuf-js/dist/utils/json.d.ts
new file mode 100644
index 0000000000000..ecddbee17c446
--- /dev/null
+++ b/node_modules/tuf-js/dist/utils/json.d.ts
@@ -0,0 +1,2 @@
+/// <reference types="node" />
+export declare function canonicalize(object: any): Buffer;
diff --git a/node_modules/tuf-js/dist/utils/json.js b/node_modules/tuf-js/dist/utils/json.js
new file mode 100644
index 0000000000000..30f82ea4c3d84
--- /dev/null
+++ b/node_modules/tuf-js/dist/utils/json.js
@@ -0,0 +1,62 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.canonicalize = void 0;
+const QUOTATION_MARK = Buffer.from('"');
+const COMMA = Buffer.from(',');
+const COLON = Buffer.from(':');
+const LEFT_SQUARE_BRACKET = Buffer.from('[');
+const RIGHT_SQUARE_BRACKET = Buffer.from(']');
+const LEFT_CURLY_BRACKET = Buffer.from('{');
+const RIGHT_CURLY_BRACKET = Buffer.from('}');
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+function canonicalize(object) {
+    let buffer = Buffer.from('');
+    if (object === null || typeof object !== 'object' || object.toJSON != null) {
+        // Primitives or toJSONable objects
+        if (typeof object === 'string') {
+            buffer = Buffer.concat([
+                buffer,
+                QUOTATION_MARK,
+                Buffer.from(object),
+                QUOTATION_MARK,
+            ]);
+        }
+        else {
+            buffer = Buffer.concat([buffer, Buffer.from(JSON.stringify(object))]);
+        }
+    }
+    else if (Array.isArray(object)) {
+        // Array - maintain element order
+        buffer = Buffer.concat([buffer, LEFT_SQUARE_BRACKET]);
+        let first = true;
+        object.forEach((element) => {
+            if (!first) {
+                buffer = Buffer.concat([buffer, COMMA]);
+            }
+            first = false;
+            // recursive call
+            buffer = Buffer.concat([buffer, canonicalize(element)]);
+        });
+        buffer = Buffer.concat([buffer, RIGHT_SQUARE_BRACKET]);
+    }
+    else {
+        // Object - Sort properties before serializing
+        buffer = Buffer.concat([buffer, LEFT_CURLY_BRACKET]);
+        let first = true;
+        Object.keys(object)
+            .sort()
+            .forEach((property) => {
+            if (!first) {
+                buffer = Buffer.concat([buffer, COMMA]);
+            }
+            first = false;
+            buffer = Buffer.concat([buffer, Buffer.from(JSON.stringify(property))]);
+            buffer = Buffer.concat([buffer, COLON]);
+            // recursive call
+            buffer = Buffer.concat([buffer, canonicalize(object[property])]);
+        });
+        buffer = Buffer.concat([buffer, RIGHT_CURLY_BRACKET]);
+    }
+    return buffer;
+}
+exports.canonicalize = canonicalize;
diff --git a/node_modules/tuf-js/dist/utils/key.d.ts b/node_modules/tuf-js/dist/utils/key.d.ts
new file mode 100644
index 0000000000000..7b631281a3408
--- /dev/null
+++ b/node_modules/tuf-js/dist/utils/key.d.ts
@@ -0,0 +1,9 @@
+/// <reference types="node" />
+import { VerifyKeyObjectInput } from 'crypto';
+interface KeyInfo {
+    keyType: string;
+    scheme: string;
+    keyVal: string;
+}
+export declare function getPublicKey(keyInfo: KeyInfo): VerifyKeyObjectInput;
+export {};
diff --git a/node_modules/tuf-js/dist/utils/key.js b/node_modules/tuf-js/dist/utils/key.js
new file mode 100644
index 0000000000000..1f795ba1a2733
--- /dev/null
+++ b/node_modules/tuf-js/dist/utils/key.js
@@ -0,0 +1,143 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.getPublicKey = void 0;
+const crypto_1 = __importDefault(require("crypto"));
+const error_1 = require("../error");
+const oid_1 = require("./oid");
+const ASN1_TAG_SEQUENCE = 0x30;
+const ANS1_TAG_BIT_STRING = 0x03;
+const NULL_BYTE = 0x00;
+const OID_EDDSA = '1.3.101.112';
+const OID_EC_PUBLIC_KEY = '1.2.840.10045.2.1';
+const OID_EC_CURVE_P256V1 = '1.2.840.10045.3.1.7';
+const PEM_HEADER = '-----BEGIN PUBLIC KEY-----';
+function getPublicKey(keyInfo) {
+    switch (keyInfo.keyType) {
+        case 'rsa':
+            return getRSAPublicKey(keyInfo);
+        case 'ed25519':
+            return getED25519PublicKey(keyInfo);
+        case 'ecdsa':
+        case 'ecdsa-sha2-nistp256':
+        case 'ecdsa-sha2-nistp384':
+            return getECDCSAPublicKey(keyInfo);
+        default:
+            throw new error_1.UnsupportedAlgorithmError(`Unsupported key type: ${keyInfo.keyType}`);
+    }
+}
+exports.getPublicKey = getPublicKey;
+function getRSAPublicKey(keyInfo) {
+    // Only support PEM-encoded RSA keys
+    if (!keyInfo.keyVal.startsWith(PEM_HEADER)) {
+        throw new error_1.CryptoError('Invalid key format');
+    }
+    const key = crypto_1.default.createPublicKey(keyInfo.keyVal);
+    switch (keyInfo.scheme) {
+        case 'rsassa-pss-sha256':
+            return {
+                key: key,
+                padding: crypto_1.default.constants.RSA_PKCS1_PSS_PADDING,
+            };
+        default:
+            throw new error_1.UnsupportedAlgorithmError(`Unsupported RSA scheme: ${keyInfo.scheme}`);
+    }
+}
+function getED25519PublicKey(keyInfo) {
+    let key;
+    // If key is already PEM-encoded we can just parse it
+    if (keyInfo.keyVal.startsWith(PEM_HEADER)) {
+        key = crypto_1.default.createPublicKey(keyInfo.keyVal);
+    }
+    else {
+        // If key is not PEM-encoded it had better be hex
+        if (!isHex(keyInfo.keyVal)) {
+            throw new error_1.CryptoError('Invalid key format');
+        }
+        key = crypto_1.default.createPublicKey({
+            key: ed25519.hexToDER(keyInfo.keyVal),
+            format: 'der',
+            type: 'spki',
+        });
+    }
+    return { key };
+}
+function getECDCSAPublicKey(keyInfo) {
+    let key;
+    // If key is already PEM-encoded we can just parse it
+    if (keyInfo.keyVal.startsWith(PEM_HEADER)) {
+        key = crypto_1.default.createPublicKey(keyInfo.keyVal);
+    }
+    else {
+        // If key is not PEM-encoded it had better be hex
+        if (!isHex(keyInfo.keyVal)) {
+            throw new error_1.CryptoError('Invalid key format');
+        }
+        key = crypto_1.default.createPublicKey({
+            key: ecdsa.hexToDER(keyInfo.keyVal),
+            format: 'der',
+            type: 'spki',
+        });
+    }
+    return { key };
+}
+const ed25519 = {
+    // Translates a hex key into a crypto KeyObject
+    // https://keygen.sh/blog/how-to-use-hexadecimal-ed25519-keys-in-node/
+    hexToDER: (hex) => {
+        const key = Buffer.from(hex, 'hex');
+        const oid = (0, oid_1.encodeOIDString)(OID_EDDSA);
+        // Create a byte sequence containing the OID and key
+        const elements = Buffer.concat([
+            Buffer.concat([
+                Buffer.from([ASN1_TAG_SEQUENCE]),
+                Buffer.from([oid.length]),
+                oid,
+            ]),
+            Buffer.concat([
+                Buffer.from([ANS1_TAG_BIT_STRING]),
+                Buffer.from([key.length + 1]),
+                Buffer.from([NULL_BYTE]),
+                key,
+            ]),
+        ]);
+        // Wrap up by creating a sequence of elements
+        const der = Buffer.concat([
+            Buffer.from([ASN1_TAG_SEQUENCE]),
+            Buffer.from([elements.length]),
+            elements,
+        ]);
+        return der;
+    },
+};
+const ecdsa = {
+    hexToDER: (hex) => {
+        const key = Buffer.from(hex, 'hex');
+        const bitString = Buffer.concat([
+            Buffer.from([ANS1_TAG_BIT_STRING]),
+            Buffer.from([key.length + 1]),
+            Buffer.from([NULL_BYTE]),
+            key,
+        ]);
+        const oids = Buffer.concat([
+            (0, oid_1.encodeOIDString)(OID_EC_PUBLIC_KEY),
+            (0, oid_1.encodeOIDString)(OID_EC_CURVE_P256V1),
+        ]);
+        const oidSequence = Buffer.concat([
+            Buffer.from([ASN1_TAG_SEQUENCE]),
+            Buffer.from([oids.length]),
+            oids,
+        ]);
+        // Wrap up by creating a sequence of elements
+        const der = Buffer.concat([
+            Buffer.from([ASN1_TAG_SEQUENCE]),
+            Buffer.from([oidSequence.length + bitString.length]),
+            oidSequence,
+            bitString,
+        ]);
+        return der;
+    },
+};
+const isHex = (key) => /^[0-9a-fA-F]+$/.test(key);
diff --git a/node_modules/tuf-js/dist/utils/oid.d.ts b/node_modules/tuf-js/dist/utils/oid.d.ts
new file mode 100644
index 0000000000000..f20456a978f0e
--- /dev/null
+++ b/node_modules/tuf-js/dist/utils/oid.d.ts
@@ -0,0 +1,2 @@
+/// <reference types="node" />
+export declare function encodeOIDString(oid: string): Buffer;
diff --git a/node_modules/tuf-js/dist/utils/oid.js b/node_modules/tuf-js/dist/utils/oid.js
new file mode 100644
index 0000000000000..e1bb7af5e54fb
--- /dev/null
+++ b/node_modules/tuf-js/dist/utils/oid.js
@@ -0,0 +1,27 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.encodeOIDString = void 0;
+const ANS1_TAG_OID = 0x06;
+function encodeOIDString(oid) {
+    const parts = oid.split('.');
+    // The first two subidentifiers are encoded into the first byte
+    const first = parseInt(parts[0], 10) * 40 + parseInt(parts[1], 10);
+    const rest = [];
+    parts.slice(2).forEach((part) => {
+        const bytes = encodeVariableLengthInteger(parseInt(part, 10));
+        rest.push(...bytes);
+    });
+    const der = Buffer.from([first, ...rest]);
+    return Buffer.from([ANS1_TAG_OID, der.length, ...der]);
+}
+exports.encodeOIDString = encodeOIDString;
+function encodeVariableLengthInteger(value) {
+    const bytes = [];
+    let mask = 0x00;
+    while (value > 0) {
+        bytes.unshift((value & 0x7f) | mask);
+        value >>= 7;
+        mask = 0x80;
+    }
+    return bytes;
+}
diff --git a/node_modules/tuf-js/dist/utils/signer.d.ts b/node_modules/tuf-js/dist/utils/signer.d.ts
new file mode 100644
index 0000000000000..376ef113c4911
--- /dev/null
+++ b/node_modules/tuf-js/dist/utils/signer.d.ts
@@ -0,0 +1,3 @@
+import crypto from 'crypto';
+import { JSONObject } from '../utils/types';
+export declare const verifySignature: (metaDataSignedData: JSONObject, key: crypto.VerifyKeyObjectInput, signature: string) => boolean;
diff --git a/node_modules/tuf-js/dist/utils/signer.js b/node_modules/tuf-js/dist/utils/signer.js
new file mode 100644
index 0000000000000..d3b2e7515d7f9
--- /dev/null
+++ b/node_modules/tuf-js/dist/utils/signer.js
@@ -0,0 +1,13 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.verifySignature = void 0;
+const crypto_1 = __importDefault(require("crypto"));
+const json_1 = require("./json");
+const verifySignature = (metaDataSignedData, key, signature) => {
+    const canonicalData = (0, json_1.canonicalize)(metaDataSignedData) || '';
+    return crypto_1.default.verify(undefined, canonicalData, key, Buffer.from(signature, 'hex'));
+};
+exports.verifySignature = verifySignature;
diff --git a/node_modules/tuf-js/dist/utils/tmpfile.d.ts b/node_modules/tuf-js/dist/utils/tmpfile.d.ts
new file mode 100644
index 0000000000000..4d5ee8abb84a6
--- /dev/null
+++ b/node_modules/tuf-js/dist/utils/tmpfile.d.ts
@@ -0,0 +1,3 @@
+type TempFileHandler<T> = (file: string) => Promise<T>;
+export declare const withTempFile: <T>(handler: TempFileHandler<T>) => Promise<T>;
+export {};
diff --git a/node_modules/tuf-js/dist/utils/tmpfile.js b/node_modules/tuf-js/dist/utils/tmpfile.js
new file mode 100644
index 0000000000000..923eef6044bcc
--- /dev/null
+++ b/node_modules/tuf-js/dist/utils/tmpfile.js
@@ -0,0 +1,25 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.withTempFile = void 0;
+const promises_1 = __importDefault(require("fs/promises"));
+const os_1 = __importDefault(require("os"));
+const path_1 = __importDefault(require("path"));
+// Invokes the given handler with the path to a temporary file. The file
+// is deleted after the handler returns.
+const withTempFile = async (handler) => withTempDir(async (dir) => handler(path_1.default.join(dir, 'tempfile')));
+exports.withTempFile = withTempFile;
+// Invokes the given handler with a temporary directory. The directory is
+// deleted after the handler returns.
+const withTempDir = async (handler) => {
+    const tmpDir = await promises_1.default.realpath(os_1.default.tmpdir());
+    const dir = await promises_1.default.mkdtemp(tmpDir + path_1.default.sep);
+    try {
+        return await handler(dir);
+    }
+    finally {
+        await promises_1.default.rm(dir, { force: true, recursive: true, maxRetries: 3 });
+    }
+};
diff --git a/node_modules/tuf-js/dist/utils/types.d.ts b/node_modules/tuf-js/dist/utils/types.d.ts
new file mode 100644
index 0000000000000..24319ddf7bb6b
--- /dev/null
+++ b/node_modules/tuf-js/dist/utils/types.d.ts
@@ -0,0 +1,10 @@
+export declare enum MetadataKind {
+    Root = "root",
+    Timestamp = "timestamp",
+    Snapshot = "snapshot",
+    Targets = "targets"
+}
+export type JSONObject = {
+    [key: string]: JSONValue;
+};
+export type JSONValue = null | boolean | number | string | JSONValue[] | JSONObject;
diff --git a/node_modules/tuf-js/dist/utils/types.js b/node_modules/tuf-js/dist/utils/types.js
new file mode 100644
index 0000000000000..469f580743f65
--- /dev/null
+++ b/node_modules/tuf-js/dist/utils/types.js
@@ -0,0 +1,10 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.MetadataKind = void 0;
+var MetadataKind;
+(function (MetadataKind) {
+    MetadataKind["Root"] = "root";
+    MetadataKind["Timestamp"] = "timestamp";
+    MetadataKind["Snapshot"] = "snapshot";
+    MetadataKind["Targets"] = "targets";
+})(MetadataKind = exports.MetadataKind || (exports.MetadataKind = {}));
diff --git a/node_modules/tuf-js/package.json b/node_modules/tuf-js/package.json
new file mode 100644
index 0000000000000..758e71223e40f
--- /dev/null
+++ b/node_modules/tuf-js/package.json
@@ -0,0 +1,58 @@
+{
+  "name": "tuf-js",
+  "version": "1.0.0",
+  "description": "JavaScript implementation of The Update Framework (TUF)",
+  "main": "dist/index.js",
+  "types": "dist/index.d.ts",
+  "scripts": {
+    "build": "tsc",
+    "test": "jest",
+    "test:watch": "jest --watch",
+    "test:ci": "jest --maxWorkers=2 --coverage",
+    "lint": "eslint --fix --ext .ts src/**",
+    "lint:check": "eslint --max-warnings 0 --ext .ts src/**",
+    "format": "prettier --write \"src/**/*\""
+  },
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/github/tuf-js.git"
+  },
+  "files": [
+    "dist"
+  ],
+  "keywords": [
+    "tuf"
+  ],
+  "author": "bdehamer@github.com",
+  "license": "MIT",
+  "bugs": {
+    "url": "https://github.com/github/tuf-js/issues"
+  },
+  "homepage": "https://github.com/github/tuf-js#readme",
+  "devDependencies": {
+    "@tsconfig/node14": "^1.0.3",
+    "@types/jest": "^28.1.8",
+    "@types/lodash.isequal": "^4.5.6",
+    "@types/make-fetch-happen": "^10.0.1",
+    "@types/minimatch": "^5.1.2",
+    "@types/node": "^18.11.10",
+    "@typescript-eslint/eslint-plugin": "^5.45.0",
+    "@typescript-eslint/parser": "^5.45.0",
+    "eslint": "^8.28.0",
+    "eslint-config-prettier": "^8.5.0",
+    "eslint-plugin-prettier": "^4.2.1",
+    "http-server": "^14.1.1",
+    "jest": "^28.1.3",
+    "nock": "^13.2.9",
+    "prettier": "^2.8.0",
+    "ts-jest": "^28.0.8",
+    "typescript": "^4.9.3"
+  },
+  "dependencies": {
+    "make-fetch-happen": "^11.0.1",
+    "minimatch": "^6.1.0"
+  },
+  "engines": {
+    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+  }
+}
diff --git a/package-lock.json b/package-lock.json
index 0c5fbdca5ac38..f7f2ff595b3fe 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -11138,6 +11138,21 @@
       "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==",
       "inBundle": true
     },
+    "node_modules/sigstore": {
+      "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/sigstore/-/sigstore-1.0.0.tgz",
+      "integrity": "sha512-e+qfbn/zf1+rCza/BhIA//Awmf0v1pa5HQS8Xk8iXrn9bgytytVLqYD0P7NSqZ6IELTgq+tcDvLPkQjNHyWLNg==",
+      "dependencies": {
+        "make-fetch-happen": "^11.0.1",
+        "tuf-js": "^1.0.0"
+      },
+      "bin": {
+        "sigstore": "bin/sigstore.js"
+      },
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/slash": {
       "version": "3.0.0",
       "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz",
@@ -14046,6 +14061,18 @@
         "node": ">=4"
       }
     },
+    "node_modules/tuf-js": {
+      "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/tuf-js/-/tuf-js-1.0.0.tgz",
+      "integrity": "sha512-1dxsQwESDzACJjTdYHQ4wJ1f/of7jALWKfJEHSBWUQB/5UTJUx9SW6GHXp4mZ1KvdBRJCpGjssoPFGi4hvw8/A==",
+      "dependencies": {
+        "make-fetch-happen": "^11.0.1",
+        "minimatch": "^6.1.0"
+      },
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/tunnel": {
       "version": "0.0.6",
       "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz",
@@ -15145,10 +15172,12 @@
       "version": "7.0.8",
       "license": "ISC",
       "dependencies": {
+        "ci-info": "^3.6.1",
         "normalize-package-data": "^5.0.0",
         "npm-package-arg": "^10.1.0",
         "npm-registry-fetch": "^14.0.3",
         "semver": "^7.3.7",
+        "sigstore": "^1.0.0",
         "ssri": "^10.0.1"
       },
       "devDependencies": {
diff --git a/tap-snapshots/test/lib/commands/config.js.test.cjs b/tap-snapshots/test/lib/commands/config.js.test.cjs
index 43db223be31c5..eee1dd569dc40 100644
--- a/tap-snapshots/test/lib/commands/config.js.test.cjs
+++ b/tap-snapshots/test/lib/commands/config.js.test.cjs
@@ -116,6 +116,7 @@ exports[`test/lib/commands/config.js TAP config list --json > output matches sna
   "preid": "",
   "production": null,
   "progress": true,
+  "provenance": false,
   "proxy": null,
   "read-only": false,
   "rebuild-bundle": true,
@@ -270,6 +271,7 @@ prefer-online = false
 preid = "" 
 production = null 
 progress = true 
+provenance = false 
 proxy = null 
 read-only = false 
 rebuild-bundle = true 
diff --git a/tap-snapshots/test/lib/docs.js.test.cjs b/tap-snapshots/test/lib/docs.js.test.cjs
index a4801df7470fe..a07aab8dd9757 100644
--- a/tap-snapshots/test/lib/docs.js.test.cjs
+++ b/tap-snapshots/test/lib/docs.js.test.cjs
@@ -1467,6 +1467,13 @@ operations, if \`process.stderr\` is a TTY.
 
 Set to \`false\` to suppress the progress bar.
 
+#### \`provenance\`
+
+* Default: false
+* Type: Boolean
+
+Indicates that a provenance statement should be generated.
+
 #### \`proxy\`
 
 * Default: null
@@ -2219,6 +2226,7 @@ Array [
   "preid",
   "production",
   "progress",
+  "provenance",
   "proxy",
   "read-only",
   "rebuild-bundle",
@@ -2353,6 +2361,7 @@ Array [
   "preid",
   "production",
   "progress",
+  "provenance",
   "proxy",
   "read-only",
   "rebuild-bundle",
@@ -3745,7 +3754,7 @@ npm publish <package-spec>
 Options:
 [--tag <tag>] [--access <restricted|public>] [--dry-run] [--otp <otp>]
 [-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]]
-[-ws|--workspaces] [--include-workspace-root]
+[-ws|--workspaces] [--include-workspace-root] [--provenance]
 
 Run "npm help publish" for more info
 
@@ -3760,6 +3769,7 @@ npm publish <package-spec>
 #### \`workspace\`
 #### \`workspaces\`
 #### \`include-workspace-root\`
+#### \`provenance\`
 `
 
 exports[`test/lib/docs.js TAP usage query > must match snapshot 1`] = `
diff --git a/workspaces/libnpmpublish/lib/provenance.js b/workspaces/libnpmpublish/lib/provenance.js
new file mode 100644
index 0000000000000..d11d210478b65
--- /dev/null
+++ b/workspaces/libnpmpublish/lib/provenance.js
@@ -0,0 +1,70 @@
+const { sigstore } = require('sigstore')
+
+const INTOTO_PAYLOAD_TYPE = 'application/vnd.in-toto+json'
+const INTOTO_STATEMENT_TYPE = 'https://in-toto.io/Statement/v0.1'
+const SLSA_PREDICATE_TYPE = 'https://slsa.dev/provenance/v0.2'
+
+const BUILDER_ID_PREFIX = 'https://github.com/npm/cli'
+const BUILD_TYPE_PREFIX = 'https://github.com/npm/cli/gha'
+const BUILD_TYPE_VERSION = 'v1'
+
+const generateProvenance = async (subject, opts) => {
+  const { env } = process
+  const payload = {
+    _type: INTOTO_STATEMENT_TYPE,
+    subject,
+    predicateType: SLSA_PREDICATE_TYPE,
+    predicate: {
+      buildType: `${BUILD_TYPE_PREFIX}@${BUILD_TYPE_VERSION}`,
+      builder: { id: `${BUILDER_ID_PREFIX}@${opts.npmVersion}` },
+      invocation: {
+        configSource: {
+          uri: `git+${env.GITHUB_SERVER_URL}/${env.GITHUB_REPOSITORY}@${env.GITHUB_REF}`,
+          digest: {
+            sha1: env.GITHUB_SHA,
+          },
+          entryPoint: env.GITHUB_WORKFLOW_REF,
+        },
+        parameters: {},
+        environment: {
+          GITHUB_ACTOR_ID: env.GITHUB_ACTOR_ID,
+          GITHUB_EVENT_NAME: env.GITHUB_EVENT_NAME,
+          GITHUB_REF: env.GITHUB_REF,
+          GITHUB_REF_TYPE: env.GITHUB_REF_TYPE,
+          GITHUB_REPOSITORY: env.GITHUB_REPOSITORY,
+          GITHUB_REPOSITORY_ID: env.GITHUB_REPOSITORY_ID,
+          GITHUB_REPOSITORY_OWNER_ID: env.GITHUB_REPOSITORY_OWNER_ID,
+          GITHUB_RUN_ATTEMPT: env.GITHUB_RUN_ATTEMPT,
+          GITHUB_RUN_ID: env.GITHUB_RUN_ID,
+          GITHUB_RUN_NUMBER: env.GITHUB_RUN_NUMBER,
+          GITHUB_SHA: env.GITHUB_SHA,
+          GITHUB_WORKFLOW_REF: env.GITHUB_WORKFLOW_REF,
+          GITHUB_WORKFLOW_SHA: env.GITHUB_WORKFLOW_SHA,
+        },
+      },
+      metadata: {
+        buildInvocationId: `${env.GITHUB_RUN_ID}-${env.GITHUB_RUN_ATTEMPT}`,
+        completeness: {
+          parameters: false,
+          environment: false,
+          materials: false,
+        },
+        reproducible: false,
+      },
+      materials: [
+        {
+          uri: `git+${env.GITHUB_SERVER_URL}/${env.GITHUB_REPOSITORY}`,
+          digest: {
+            sha1: env.GITHUB_SHA,
+          },
+        },
+      ],
+    },
+  }
+
+  return sigstore.attest(Buffer.from(JSON.stringify(payload)), INTOTO_PAYLOAD_TYPE, opts)
+}
+
+module.exports = {
+  generateProvenance,
+}
diff --git a/workspaces/libnpmpublish/lib/publish.js b/workspaces/libnpmpublish/lib/publish.js
index 7d01fabf1f2b4..353688a10eac1 100644
--- a/workspaces/libnpmpublish/lib/publish.js
+++ b/workspaces/libnpmpublish/lib/publish.js
@@ -4,6 +4,9 @@ const npa = require('npm-package-arg')
 const semver = require('semver')
 const { URL } = require('url')
 const ssri = require('ssri')
+const ciInfo = require('ci-info')
+
+const { generateProvenance } = require('./provenance')
 
 const publish = async (manifest, tarballData, opts) => {
   if (manifest.private) {
@@ -36,7 +39,7 @@ Remove the 'private' field from the package.json to publish it.`),
     )
   }
 
-  const metadata = buildMetadata(reg, pubManifest, tarballData, opts)
+  const metadata = await buildMetadata(reg, pubManifest, tarballData, spec, opts)
 
   try {
     return await npmFetch(spec.escapedName, {
@@ -89,8 +92,8 @@ const patchManifest = (_manifest, opts) => {
   return manifest
 }
 
-const buildMetadata = (registry, manifest, tarballData, opts) => {
-  const { access, defaultTag, algorithms } = opts
+const buildMetadata = async (registry, manifest, tarballData, spec, opts) => {
+  const { access, defaultTag, algorithms, provenance } = opts
   const root = {
     _id: manifest.name,
     name: manifest.name,
@@ -105,6 +108,7 @@ const buildMetadata = (registry, manifest, tarballData, opts) => {
   root['dist-tags'][tag] = manifest.version
 
   const tarballName = `${manifest.name}-${manifest.version}.tgz`
+  const provenanceBundleName = `${manifest.name}-${manifest.version}.sigstore`
   const tarballURI = `${manifest.name}/-/${tarballName}`
   const integrity = ssri.fromData(tarballData, {
     algorithms: [...new Set(['sha1'].concat(algorithms))],
@@ -130,6 +134,41 @@ const buildMetadata = (registry, manifest, tarballData, opts) => {
     length: tarballData.length,
   }
 
+  // Handle case where --provenance flag was set to true
+  if (provenance === true) {
+    const subject = {
+      name: npa.toPurl(spec),
+      digest: { sha512: integrity.sha512[0].hexDigest() },
+    }
+
+    // Ensure that we're running in GHA and an OIDC token is available,
+    // currently the only supported build environment
+    if (ciInfo.name !== 'GitHub Actions' || !process.env.ACTIONS_ID_TOKEN_REQUEST_URL) {
+      throw Object.assign(
+        new Error('Automatic provenance generation not supported outside of GitHub Actions'),
+        { code: 'EUSAGE' }
+      )
+    }
+
+    const visibility =
+      await npmFetch.json(`${registry}/-/package/${spec.escapedName}/visibility`, opts)
+    if (!visibility.public && opts.provenance === true && opts.access !== 'public') {
+      throw Object.assign(
+        /* eslint-disable-next-line max-len */
+        new Error("Can't generate provenance for new or private package, you must set `access` to public."),
+        { code: 'EUSAGE' }
+      )
+    }
+    const provenanceBundle = await generateProvenance([subject], opts)
+
+    const serializedBundle = JSON.stringify(provenanceBundle)
+    root._attachments[provenanceBundleName] = {
+      content_type: provenanceBundle.mediaType,
+      data: serializedBundle,
+      length: serializedBundle.length,
+    }
+  }
+
   return root
 }
 
diff --git a/workspaces/libnpmpublish/package.json b/workspaces/libnpmpublish/package.json
index 5f16896028df3..798287d124f5c 100644
--- a/workspaces/libnpmpublish/package.json
+++ b/workspaces/libnpmpublish/package.json
@@ -38,10 +38,12 @@
   "bugs": "https://github.com/npm/cli/issues",
   "homepage": "https://npmjs.com/package/libnpmpublish",
   "dependencies": {
+    "ci-info": "^3.6.1",
     "normalize-package-data": "^5.0.0",
     "npm-package-arg": "^10.1.0",
     "npm-registry-fetch": "^14.0.3",
     "semver": "^7.3.7",
+    "sigstore": "^1.0.0",
     "ssri": "^10.0.1"
   },
   "engines": {
diff --git a/workspaces/libnpmpublish/test/fixtures/bad-bundle.json b/workspaces/libnpmpublish/test/fixtures/bad-bundle.json
new file mode 100644
index 0000000000000..963c6f52d8a26
--- /dev/null
+++ b/workspaces/libnpmpublish/test/fixtures/bad-bundle.json
@@ -0,0 +1 @@
+{"this: is not [valid 'json}
diff --git a/workspaces/libnpmpublish/test/fixtures/bad-dsse-payload-bundle.json b/workspaces/libnpmpublish/test/fixtures/bad-dsse-payload-bundle.json
new file mode 100644
index 0000000000000..8f667d60258e9
--- /dev/null
+++ b/workspaces/libnpmpublish/test/fixtures/bad-dsse-payload-bundle.json
@@ -0,0 +1,51 @@
+{
+    "mediaType": "application/vnd.dev.sigstore.bundle+json;version=0.1",
+    "verificationData": {
+      "tlogEntries": [
+        {
+          "logIndex": "8538825",
+          "logId": {
+            "keyId": "wNI9atQGlz+VWfO6LRygH4QUfY/8W4RFwiT5i5WRgB0="
+          },
+          "kindVersion": {
+            "kind": "intoto",
+            "version": "0.0.2"
+          },
+          "integratedTime": "1670349482",
+          "inclusionPromise": {
+            "signedEntryTimestamp": "MEUCIQCvI6QcaJeR/eVj/q4/rCu5D6JzcGvzGvTsBHCu/6VzzwIgS9tq7zm4DIFa5CsTYpXAudhCiKbxH3GGsStsoTS54K0="
+          },
+          "canonicalizedBody": "eyJhcGlWZXJzaW9uIjoiMC4wLjIiLCJraW5kIjoiaW50b3RvIiwic3BlYyI6eyJjb250ZW50Ijp7ImVudmVsb3BlIjp7InBheWxvYWRUeXBlIjoiYXBwbGljYXRpb24vdm5kLmluLXRvdG8ranNvbiIsInNpZ25hdHVyZXMiOlt7InB1YmxpY0tleSI6IkxTMHRMUzFDUlVkSlRpQkRSVkpVU1VaSlEwRlVSUzB0TFMwdENrMUpTVU52VkVORFFXbGxaMEYzU1VKQlowbFZSMUJaV1VOV1YzWjJVVGN6YlZJNFlWbzBSa3BoYUUxb2RqVXdkME5uV1VsTGIxcEplbW93UlVGM1RYY0tUbnBGVmsxQ1RVZEJNVlZGUTJoTlRXTXliRzVqTTFKMlkyMVZkVnBIVmpKTlVqUjNTRUZaUkZaUlVVUkZlRlo2WVZka2VtUkhPWGxhVXpGd1ltNVNiQXBqYlRGc1drZHNhR1JIVlhkSWFHTk9UV3BKZUUxcVFUSk5WR014VDBSQmVWZG9ZMDVOYWtsNFRXcEJNazFVWjNkUFJFRjVWMnBCUVUxR2EzZEZkMWxJQ2t0dldrbDZhakJEUVZGWlNVdHZXa2w2YWpCRVFWRmpSRkZuUVVWbmFuZHViM0JLYVRaV0swdzNZbVkwY0VWRU9HRndUV3R2ZVdka1pVZHVhblZ1TlZRS1REUk1SMGhNYUhGQ2VEbDNiMHRFUm01VUswNUVjRk5IYm1FMlVHZzVRMGM1V1dsb01qVmxSRWxwWkZVMWJUaDRVR0ZQUTBGVldYZG5aMFpEVFVFMFJ3cEJNVlZrUkhkRlFpOTNVVVZCZDBsSVowUkJWRUpuVGxaSVUxVkZSRVJCUzBKblozSkNaMFZHUWxGalJFRjZRV1JDWjA1V1NGRTBSVVpuVVZWUGJ6SlZDa2hVZW1sM1pERTBiMFp0TVc1aGVFeHVkbVV4VTI5amQwaDNXVVJXVWpCcVFrSm5kMFp2UVZVek9WQndlakZaYTBWYVlqVnhUbXB3UzBaWGFYaHBORmtLV2tRNGQwaDNXVVJXVWpCU1FWRklMMEpDVlhkRk5FVlNXVzVLY0ZsWE5VRmFSMVp2V1ZjeGJHTnBOV3BpTWpCM1RFRlpTMHQzV1VKQ1FVZEVkbnBCUWdwQlVWRmxZVWhTTUdOSVRUWk1lVGx1WVZoU2IyUlhTWFZaTWpsMFRESjRkbG95YkhWTU1qbG9aRmhTYjAxSlIweENaMjl5UW1kRlJVRmtXalZCWjFGRENrSklNRVZsZDBJMVFVaGpRVE5VTUhkaGMySklSVlJLYWtkU05HTnRWMk16UVhGS1MxaHlhbVZRU3pNdmFEUndlV2RET0hBM2J6UkJRVUZIUlRaS1dtOEtZM2RCUVVKQlRVRlRSRUpIUVdsRlFUQm5LM0paTnk5d2JHbE5ZMWxzV1hwV2NFeHpUVUYwVDFWbFJIaHhUMEUzUnpCUGJEVTBjRnBuTlhORFNWRkRSd3AyU0VKQmJsUkJOakpQUlVFeE4yUm1SRUl2TVhoQ05HUnBWVmNyUkROelZXUmpVR2x2YkhaRFRtcEJTMEpuWjNGb2EycFBVRkZSUkVGM1RtOUJSRUpzQ2tGcVJVRTRlbWtyU2s1eVQwVTNRa1lyVFVneldrOHlVbHA2TkhOdFRVbFRlRUZ6YlhaS2QybFNaRTFWS3pablZqRlRaelJ6T0VGalZXTnZLMWs0T1U0S1RuRkxha0ZxUW5BclUxZElUQzh2T0haUmEydDVlRlI0T1RsUGVubzJPVk5GYVVsWE1FdEpkMmhNZFhjNVJqVk9jMGh2U210NFkwTjJVemx4WkdNdmNBcFpjRkJSYlM5RlBRb3RMUzB0TFVWT1JDQkRSVkpVU1VaSlEwRlVSUzB0TFMwdENnPT0iLCJzaWciOiJUVVZWUTBsUlJFcHBUVmRvSzFWM2NtbzN
VVkpFV1ZoTk1XcEVORXRKYVV0SFFrMUdUbTF5TjBSUVJrcENSVk5WUzJkSloxVlBPV3hoYlVFMmFEZHhTWFJWT0RWQ2VtZEdVVU5uZEVKSWMyeDVVRTkyYldGclpEVlpha2MxY1c4OSJ9XX0sImhhc2giOnsiYWxnb3JpdGhtIjoic2hhMjU2IiwidmFsdWUiOiI1ZjM0ZDEyNzg4OWFjYWMwY2ZhNmNmMjk4NmY5N2E4ZGYzNmEyNDQ4YzU2Nzc4ZjVmMDZjZWYzYzIxYjQyNDlmIn0sInBheWxvYWRIYXNoIjp7ImFsZ29yaXRobSI6InNoYTI1NiIsInZhbHVlIjoiZDk0ZTg0N2VjOWJkODJmODNjODU4MzgwNjc5MzU4ZTFiYmExZGVmYmI5NTdmN2FlYjIwNzZkY2RkNTFiYTg1NCJ9fX19"
+        }
+      ],
+      "timestampVerificationData": {
+        "rfc3161Timestamps": []
+      }
+    },
+    "verificationMaterial": {
+      "x509CertificateChain": {
+        "certificates": [
+          {
+            "rawBytes": "MIICoTCCAiegAwIBAgIUGPYYCVWvvQ73mR8aZ4FJahMhv50wCgYIKoZIzj0EAwMwNzEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MR4wHAYDVQQDExVzaWdzdG9yZS1pbnRlcm1lZGlhdGUwHhcNMjIxMjA2MTc1ODAyWhcNMjIxMjA2MTgwODAyWjAAMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEgjwnopJi6V+L7bf4pED8apMkoygdeGnjun5TL4LGHLhqBx9woKDFnT+NDpSGna6Ph9CG9Yih25eDIidU5m8xPaOCAUYwggFCMA4GA1UdDwEB/wQEAwIHgDATBgNVHSUEDDAKBggrBgEFBQcDAzAdBgNVHQ4EFgQUOo2UHTziwd14oFm1naxLnve1SocwHwYDVR0jBBgwFoAU39Ppz1YkEZb5qNjpKFWixi4YZD8wHwYDVR0RAQH/BBUwE4ERYnJpYW5AZGVoYW1lci5jb20wLAYKKwYBBAGDvzABAQQeaHR0cHM6Ly9naXRodWIuY29tL2xvZ2luL29hdXRoMIGLBgorBgEEAdZ5AgQCBH0EewB5AHcA3T0wasbHETJjGR4cmWc3AqJKXrjePK3/h4pygC8p7o4AAAGE6JZocwAABAMASDBGAiEA0g+rY7/pliMcYlYzVpLsMAtOUeDxqOA7G0Ol54pZg5sCIQCGvHBAnTA62OEA17dfDB/1xB4diUW+D3sUdcPiolvCNjAKBggqhkjOPQQDAwNoADBlAjEA8zi+JNrOE7BF+MH3ZO2RZz4smMISxAsmvJwiRdMU+6gV1Sg4s8AcUco+Y89NNqKjAjBp+SWHL//8vQkkyxTx99Ozz69SEiIW0KIwhLuw9F5NsHoJkxcCvS9qdc/pYpPQm/E="
+          },
+          {
+            "rawBytes": "MIICGjCCAaGgAwIBAgIUALnViVfnU0brJasmRkHrn/UnfaQwCgYIKoZIzj0EAwMwKjEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MREwDwYDVQQDEwhzaWdzdG9yZTAeFw0yMjA0MTMyMDA2MTVaFw0zMTEwMDUxMzU2NThaMDcxFTATBgNVBAoTDHNpZ3N0b3JlLmRldjEeMBwGA1UEAxMVc2lnc3RvcmUtaW50ZXJtZWRpYXRlMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE8RVS/ysH+NOvuDZyPIZtilgUF9NlarYpAd9HP1vBBH1U5CV77LSS7s0ZiH4nE7Hv7ptS6LvvR/STk798LVgMzLlJ4HeIfF3tHSaexLcYpSASr1kS0N/RgBJz/9jWCiXno3sweTAOBgNVHQ8BAf8EBAMCAQYwEwYDVR0lBAwwCgYIKwYBBQUHAwMwEgYDVR0TAQH/BAgwBgEB/wIBADAdBgNVHQ4EFgQU39Ppz1YkEZb5qNjpKFWixi4YZD8wHwYDVR0jBBgwFoAUWMAeX5FFpWapesyQoZMi0CrFxfowCgYIKoZIzj0EAwMDZwAwZAIwPCsQK4DYiZYDPIaDi5HFKnfxXx6ASSVmERfsynYBiX2X6SJRnZU84/9DZdnFvvxmAjBOt6QpBlc4J/0DxvkTCqpclvziL6BCCPnjdlIB3Pu3BxsPmygUY7Ii2zbdCdliiow="
+          },
+          {
+            "rawBytes": "MIIB9zCCAXygAwIBAgIUALZNAPFdxHPwjeDloDwyYChAO/4wCgYIKoZIzj0EAwMwKjEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MREwDwYDVQQDEwhzaWdzdG9yZTAeFw0yMTEwMDcxMzU2NTlaFw0zMTEwMDUxMzU2NThaMCoxFTATBgNVBAoTDHNpZ3N0b3JlLmRldjERMA8GA1UEAxMIc2lnc3RvcmUwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAT7XeFT4rb3PQGwS4IajtLk3/OlnpgangaBclYpsYBr5i+4ynB07ceb3LP0OIOZdxexX69c5iVuyJRQ+Hz05yi+UF3uBWAlHpiS5sh0+H2GHE7SXrk1EC5m1Tr19L9gg92jYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRYwB5fkUWlZql6zJChkyLQKsXF+jAfBgNVHSMEGDAWgBRYwB5fkUWlZql6zJChkyLQKsXF+jAKBggqhkjOPQQDAwNpADBmAjEAj1nHeXZp+13NWBNa+EDsDP8G1WWg1tCMWP/WHPqpaVo0jhsweNFZgSs0eE7wYI4qAjEA2WB9ot98sIkoF3vZYdd3/VtWB5b9TNMea7Ix/stJ5TfcLLeABLE4BNJOsQ4vnBHJ"
+          }
+        ]
+      }
+    },
+    "dsseEnvelope": {
+      "payload": "eyJ0aGlzOiBpcyBub3QgW3ZhbGlkICdqc29ufQo=",
+      "payloadType": "application/vnd.in-toto+json",
+      "signatures": [
+        {
+          "sig": "MEUCIQDJiMWh+Uwrj7QRDYXM1jD4KIiKGBMFNmr7DPFJBESUKgIgUO9lamA6h7qItU85BzgFQCgtBHslyPOvmakd5YjG5qo=",
+          "keyid": ""
+        }
+      ]
+    }
+  }
+  
\ No newline at end of file
diff --git a/workspaces/libnpmpublish/test/fixtures/digest-mismatch-provenance-bundle.json b/workspaces/libnpmpublish/test/fixtures/digest-mismatch-provenance-bundle.json
new file mode 100644
index 0000000000000..12fc00c525f35
--- /dev/null
+++ b/workspaces/libnpmpublish/test/fixtures/digest-mismatch-provenance-bundle.json
@@ -0,0 +1,51 @@
+{
+    "mediaType": "application/vnd.dev.sigstore.bundle+json;version=0.1",
+    "verificationData": {
+      "tlogEntries": [
+        {
+          "logIndex": "8539273",
+          "logId": {
+            "keyId": "wNI9atQGlz+VWfO6LRygH4QUfY/8W4RFwiT5i5WRgB0="
+          },
+          "kindVersion": {
+            "kind": "intoto",
+            "version": "0.0.2"
+          },
+          "integratedTime": "1670350112",
+          "inclusionPromise": {
+            "signedEntryTimestamp": "MEYCIQC9AAYrPMF/QP6ueHKhAE0pijRutfC4tWYUI/HEg3F+2AIhAMRB7Gs8sjOEXv/5nou+6w3r3897Brw2cQuk0YBKLGC1"
+          },
+          "canonicalizedBody": "eyJhcGlWZXJzaW9uIjoiMC4wLjIiLCJraW5kIjoiaW50b3RvIiwic3BlYyI6eyJjb250ZW50Ijp7ImVudmVsb3BlIjp7InBheWxvYWRUeXBlIjoiYXBwbGljYXRpb24vdm5kLmluLXRvdG8ranNvbiIsInNpZ25hdHVyZXMiOlt7InB1YmxpY0tleSI6IkxTMHRMUzFDUlVkSlRpQkRSVkpVU1VaSlEwRlVSUzB0TFMwdENrMUpTVU52YWtORFFXbGxaMEYzU1VKQlowbFZSRUpXWjFoV2MxbGpRM0JaYldSQlRrMW1hRlpuY0ZZNU5WSm5kME5uV1VsTGIxcEplbW93UlVGM1RYY0tUbnBGVmsxQ1RVZEJNVlZGUTJoTlRXTXliRzVqTTFKMlkyMVZkVnBIVmpKTlVqUjNTRUZaUkZaUlVVUkZlRlo2WVZka2VtUkhPWGxhVXpGd1ltNVNiQXBqYlRGc1drZHNhR1JIVlhkSWFHTk9UV3BKZUUxcVFUSk5WR2QzVDBSTmVWZG9ZMDVOYWtsNFRXcEJNazFVWjNoUFJFMTVWMnBCUVUxR2EzZEZkMWxJQ2t0dldrbDZhakJEUVZGWlNVdHZXa2w2YWpCRVFWRmpSRkZuUVVVNWRHSXZNRzFLY0V4TVNIVnFNV1pHUmtkc2RrdHBhRkpUVjBGMFNrWm5Ua1IxT0VVS1dTOUdORTB4UVRRck9EWmFabk5CTkc5aFN6UnBVMnhRY0U1dFZreElXVTVyT0dKS2NGUkJZVmxyZFVNNGR6UlZPVFpQUTBGVldYZG5aMFpEVFVFMFJ3cEJNVlZrUkhkRlFpOTNVVVZCZDBsSVowUkJWRUpuVGxaSVUxVkZSRVJCUzBKblozSkNaMFZHUWxGalJFRjZRV1JDWjA1V1NGRTBSVVpuVVZVNGRXeEVDbTlxZVhNdk9ISkpjRGMyVlZSTmFFZG1NMmQ0WWpaUmQwaDNXVVJXVWpCcVFrSm5kMFp2UVZVek9WQndlakZaYTBWYVlqVnhUbXB3UzBaWGFYaHBORmtLV2tRNGQwaDNXVVJXVWpCU1FWRklMMEpDVlhkRk5FVlNXVzVLY0ZsWE5VRmFSMVp2V1ZjeGJHTnBOV3BpTWpCM1RFRlpTMHQzV1VKQ1FVZEVkbnBCUWdwQlVWRmxZVWhTTUdOSVRUWk1lVGx1WVZoU2IyUlhTWFZaTWpsMFRESjRkbG95YkhWTU1qbG9aRmhTYjAxSlIweENaMjl5UW1kRlJVRmtXalZCWjFGRENrSklNRVZsZDBJMVFVaGpRVE5VTUhkaGMySklSVlJLYWtkU05HTnRWMk16UVhGS1MxaHlhbVZRU3pNdmFEUndlV2RET0hBM2J6UkJRVUZIUlRaTFFVWUtaVkZCUVVKQlRVRlRSRUpIUVdsRlFYTXdhR2R6VlZwdmFHVTRkbWxoVVV0alUzRjNSVkJGVmk5QmVuVnJORkV3TW1ZcmQxaHFNVk5IZDJORFNWRkVMd3BrV2pSYVFrNVdWekpGTkRSdmJtOU5RalowWkdsWFVubHhTM3BSZEhONFJEUlZVR0ZpVERkd1FYcEJTMEpuWjNGb2EycFBVRkZSUkVGM1RuQkJSRUp0Q2tGcVJVRnZaRXR0UnpKa1lYcHVaRlZtTUVObFUybHFSa3BaY1ZsaWR6Z3ZPWFF3VW5BemRXbFhkWEpIV0RCa1RreHBNbWw1YlN0S2JsQjVieXROU0ZFS2EwTmhjVUZxUlVGNmFuUTRReTlVVGtwWU1tNXhXSEEwYzNCeE9HSlFkRTV0U2t4R1pIRjNUbE00WTJaMVRXWnRiMU51VVZnMFNtSnpkVUUzUkdkQmFRcHJSa1pNTjFCWlN3b3RMUzB0TFVWT1JDQkRSVkpVU1VaSlEwRlVSUzB0TFMwdENnPT0iLCJzaWciOiJUVVZaUTBsUlJERmhiR2h4TjJkT1FqaFN
aMmQ0UmtseFpGUkZVVFJVU1dsNFpXVXhSRWRpTjJSc1JGTTJlVkpoTkZGSmFFRlFWbEpUU0U5Wk4wMXFXRWwwWW1NclVsTTFRbVZ6WVhWeWRVUnhUREp0WXpoSFQxbGtkVzFvUnpSdyJ9XX0sImhhc2giOnsiYWxnb3JpdGhtIjoic2hhMjU2IiwidmFsdWUiOiIxYzE5MmE5OWQ5N2ZiNGJhZDEzOWZhNWIyMTQ4MjMyZGE4MWMwNjg4YjNjYzEyMzMwYjkyNDc0M2YwNGY3YmZkIn0sInBheWxvYWRIYXNoIjp7ImFsZ29yaXRobSI6InNoYTI1NiIsInZhbHVlIjoiYmYxOThmMjNmYTc0Yzg0OTEzYTRjMjU1ZTk0YWNhYWE0NGFlODJiYjM0ZWEyOGM5Y2E2Y2U1YWYzNWY2YmE2YSJ9fX19"
+        }
+      ],
+      "timestampVerificationData": {
+        "rfc3161Timestamps": []
+      }
+    },
+    "verificationMaterial": {
+      "x509CertificateChain": {
+        "certificates": [
+          {
+            "rawBytes": "MIICojCCAiegAwIBAgIUDBVgXVsYcCpYmdANMfhVgpV95RgwCgYIKoZIzj0EAwMwNzEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MR4wHAYDVQQDExVzaWdzdG9yZS1pbnRlcm1lZGlhdGUwHhcNMjIxMjA2MTgwODMyWhcNMjIxMjA2MTgxODMyWjAAMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE9tb/0mJpLLHuj1fFFGlvKihRSWAtJFgNDu8EY/F4M1A4+86ZfsA4oaK4iSlPpNmVLHYNk8bJpTAaYkuC8w4U96OCAUYwggFCMA4GA1UdDwEB/wQEAwIHgDATBgNVHSUEDDAKBggrBgEFBQcDAzAdBgNVHQ4EFgQU8ulDojys/8rIp76UTMhGf3gxb6QwHwYDVR0jBBgwFoAU39Ppz1YkEZb5qNjpKFWixi4YZD8wHwYDVR0RAQH/BBUwE4ERYnJpYW5AZGVoYW1lci5jb20wLAYKKwYBBAGDvzABAQQeaHR0cHM6Ly9naXRodWIuY29tL2xvZ2luL29hdXRoMIGLBgorBgEEAdZ5AgQCBH0EewB5AHcA3T0wasbHETJjGR4cmWc3AqJKXrjePK3/h4pygC8p7o4AAAGE6KAFeQAABAMASDBGAiEAs0hgsUZohe8viaQKcSqwEPEV/Azuk4Q02f+wXj1SGwcCIQD/dZ4ZBNVW2E44onoMB6tdiWRyqKzQtsxD4UPabL7pAzAKBggqhkjOPQQDAwNpADBmAjEAodKmG2dazndUf0CeSijFJYqYbw8/9t0Rp3uiWurGX0dNLi2iym+JnPyo+MHQkCaqAjEAzjt8C/TNJX2nqXp4spq8bPtNmJLFdqwNS8cfuMfmoSnQX4JbsuA7DgAikFFL7PYK"
+          },
+          {
+            "rawBytes": "MIICGjCCAaGgAwIBAgIUALnViVfnU0brJasmRkHrn/UnfaQwCgYIKoZIzj0EAwMwKjEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MREwDwYDVQQDEwhzaWdzdG9yZTAeFw0yMjA0MTMyMDA2MTVaFw0zMTEwMDUxMzU2NThaMDcxFTATBgNVBAoTDHNpZ3N0b3JlLmRldjEeMBwGA1UEAxMVc2lnc3RvcmUtaW50ZXJtZWRpYXRlMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE8RVS/ysH+NOvuDZyPIZtilgUF9NlarYpAd9HP1vBBH1U5CV77LSS7s0ZiH4nE7Hv7ptS6LvvR/STk798LVgMzLlJ4HeIfF3tHSaexLcYpSASr1kS0N/RgBJz/9jWCiXno3sweTAOBgNVHQ8BAf8EBAMCAQYwEwYDVR0lBAwwCgYIKwYBBQUHAwMwEgYDVR0TAQH/BAgwBgEB/wIBADAdBgNVHQ4EFgQU39Ppz1YkEZb5qNjpKFWixi4YZD8wHwYDVR0jBBgwFoAUWMAeX5FFpWapesyQoZMi0CrFxfowCgYIKoZIzj0EAwMDZwAwZAIwPCsQK4DYiZYDPIaDi5HFKnfxXx6ASSVmERfsynYBiX2X6SJRnZU84/9DZdnFvvxmAjBOt6QpBlc4J/0DxvkTCqpclvziL6BCCPnjdlIB3Pu3BxsPmygUY7Ii2zbdCdliiow="
+          },
+          {
+            "rawBytes": "MIIB9zCCAXygAwIBAgIUALZNAPFdxHPwjeDloDwyYChAO/4wCgYIKoZIzj0EAwMwKjEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MREwDwYDVQQDEwhzaWdzdG9yZTAeFw0yMTEwMDcxMzU2NTlaFw0zMTEwMDUxMzU2NThaMCoxFTATBgNVBAoTDHNpZ3N0b3JlLmRldjERMA8GA1UEAxMIc2lnc3RvcmUwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAT7XeFT4rb3PQGwS4IajtLk3/OlnpgangaBclYpsYBr5i+4ynB07ceb3LP0OIOZdxexX69c5iVuyJRQ+Hz05yi+UF3uBWAlHpiS5sh0+H2GHE7SXrk1EC5m1Tr19L9gg92jYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRYwB5fkUWlZql6zJChkyLQKsXF+jAfBgNVHSMEGDAWgBRYwB5fkUWlZql6zJChkyLQKsXF+jAKBggqhkjOPQQDAwNpADBmAjEAj1nHeXZp+13NWBNa+EDsDP8G1WWg1tCMWP/WHPqpaVo0jhsweNFZgSs0eE7wYI4qAjEA2WB9ot98sIkoF3vZYdd3/VtWB5b9TNMea7Ix/stJ5TfcLLeABLE4BNJOsQ4vnBHJ"
+          }
+        ]
+      }
+    },
+    "dsseEnvelope": {
+      "payload": "ewogICJfdHlwZSI6ICJodHRwczovL2luLXRvdG8uaW8vU3RhdGVtZW50L3YwLjEiLAogICJzdWJqZWN0IjogWwogICAgewogICAgICAibmFtZSI6ICJwa2c6bnBtLyU0MG5wbWNsaS9saWJucG1wdWJsaXNoLXRlc3RAMS4wLjAiLAogICAgICAiZGlnZXN0IjogewogICAgICAgICJzaGE1MTIiOiAiOWYwM2M2ZTI4YjllNGM3ZDg1ZjFmMThkMDg3MDYxNWYwYjlmMjQ1NGVhZWIxZGU3OTlmNGVkNDY1ODdmMTRhZDIzOWExYjIzY2E0YzA5Y2Q3MWNmMDkzMGYwNjBlOTU3MmU5OGMzMzcyZDJjOWY4Y2Y0Zjc2YTkyMzRmZTJkZjciCiAgICAgIH0KICAgIH0KICBdLAogICJwcmVkaWNhdGVUeXBlIjogImh0dHBzOi8vc2xzYS5kZXYvcHJvdmVuYW5jZS92MC4yIiwKICAicHJlZGljYXRlIjogewogICAgImJ1aWxkVHlwZSI6ICJodHRwczovL2dpdGh1Yi5jb20vbnBtL2NsaS9naGFAdjAiLAogICAgImJ1aWxkZXIiOiB7CiAgICAgICJpZCI6ICJodHRwczovL2dpdGh1Yi5jb20vbnBtL2NsaUA5LjEuMyIKICAgIH0sCiAgICAiaW52b2NhdGlvbiI6IHsKICAgICAgImNvbmZpZ1NvdXJjZSI6IHsKICAgICAgICAidXJpIjogImdpdCtodHRwczovL2dpdGh1Yi5jb20vbnBtL2NsaUByZWZzL2hlYWRzL2xhdGVzdCIsCiAgICAgICAgImRpZ2VzdCI6IHsKICAgICAgICAgICJzaGExIjogIjFjZjE3ODA0Yzc2MjVjNWM3OGVjOTY5NmY5ZmVmN2VjYzQ0ZTBjNTgiCiAgICAgICAgfSwKICAgICAgICAiZW50cnlQb2ludCI6ICJwdWJsaXNoIgogICAgICB9LAogICAgICAicGFyYW1ldGVycyI6IHt9LAogICAgICAiZW52aXJvbm1lbnQiOiB7CiAgICAgICAgIkdJVEhVQl9FVkVOVF9OQU1FIjogIndvcmtmbG93X2Rpc3BhdGNoIiwKICAgICAgICAiR0lUSFVCX0pPQiI6ICJwcm9kdWN0aW9uIiwKICAgICAgICAiR0lUSFVCX1JFRiI6ICJyZWZzL2hlYWRzL2xhdGVzdCIsCiAgICAgICAgIkdJVEhVQl9SRUZfVFlQRSI6ICJicmFuY2giLAogICAgICAgICJHSVRIVUJfUkVQT1NJVE9SWSI6ICJucG0vY2xpIiwKICAgICAgICAiR0lUSFVCX1JVTl9BVFRFTVBUIjogIjEiLAogICAgICAgICJHSVRIVUJfUlVOX0lEIjogIjM2MjIzMTQzMDEiLAogICAgICAgICJHSVRIVUJfUlVOX05VTUJFUiI6ICIyNSIsCiAgICAgICAgIkdJVEhVQl9TSEEiOiAiMWNmMTc4MDRjNzYyNWM1Yzc4ZWM5Njk2ZjlmZWY3ZWNjNDRlMGM1OCIsCiAgICAgICAgIkdJVEhVQl9XT1JLRkxPVyI6ICJwdWJsaXNoIiwKICAgICAgICAiSU1BR0VfT1MiOiAidWJ1bnR1MjIiLAogICAgICAgICJJTUFHRV9WRVJTSU9OIjogIjIwMjIxMTI3LjEiLAogICAgICAgICJSVU5ORVJfQVJDSCI6ICJYNjQiLAogICAgICAgICJSVU5ORVJfTkFNRSI6ICJIb3N0ZWQgQWdlbnQiLAogICAgICAgICJSVU5ORVJfT1MiOiAiTGludXgiCiAgICAgIH0KICAgIH0sCiAgICAibWV0YWRhdGEiOiB7CiAgICAgICJidWlsZEludm9jYXRpb25JZCI6ICIzNjIyMzE0MzAxLTEiLAogICAgICAiY29tcGxldGVuZ
XNzIjogewogICAgICAgICJwYXJhbWV0ZXJzIjogZmFsc2UsCiAgICAgICAgImVudmlyb25tZW50IjogZmFsc2UsCiAgICAgICAgIm1hdGVyaWFscyI6IGZhbHNlCiAgICAgIH0sCiAgICAgICJyZXByb2R1Y2libGUiOiBmYWxzZQogICAgfSwKICAgICJtYXRlcmlhbHMiOiBbCiAgICAgIHsKICAgICAgICAidXJpIjogImdpdCtodHRwczovL2dpdGh1Yi5jb20vbnBtL2NsaSIsCiAgICAgICAgImRpZ2VzdCI6IHsKICAgICAgICAgICJzaGExIjogIjFjZjE3ODA0Yzc2MjVjNWM3OGVjOTY5NmY5ZmVmN2VjYzQ0ZTBjNTgiCiAgICAgICAgfQogICAgICB9CiAgICBdCiAgfQp9Cg==",
+      "payloadType": "application/vnd.in-toto+json",
+      "signatures": [
+        {
+          "sig": "MEYCIQD1alhq7gNB8RggxFIqdTEQ4TIixee1DGb7dlDS6yRa4QIhAPVRSHOY7MjXItbc+RS5BesauruDqL2mc8GOYdumhG4p",
+          "keyid": ""
+        }
+      ]
+    }
+  }
+  
\ No newline at end of file
diff --git a/workspaces/libnpmpublish/test/fixtures/invalid-signature-bundle.json b/workspaces/libnpmpublish/test/fixtures/invalid-signature-bundle.json
new file mode 100644
index 0000000000000..01263dea7a43b
--- /dev/null
+++ b/workspaces/libnpmpublish/test/fixtures/invalid-signature-bundle.json
@@ -0,0 +1,51 @@
+{
+    "mediaType": "application/vnd.dev.sigstore.bundle+json;version=0.1",
+    "verificationData": {
+      "tlogEntries": [
+        {
+          "logIndex": "8546868",
+          "logId": {
+            "keyId": "wNI9atQGlz+VWfO6LRygH4QUfY/8W4RFwiT5i5WRgB0="
+          },
+          "kindVersion": {
+            "kind": "intoto",
+            "version": "0.0.2"
+          },
+          "integratedTime": "1670359999",
+          "inclusionPromise": {
+            "signedEntryTimestamp": "MEQCIFjNU2nXGfYqtbauWbMIhLQ0YqjVZTk9xBULR0NGc8uRAiBmBO0GvpYiqeFfI7Uq41unOBGKKyJaTSS/GqHXh1urGg=="
+          },
+          "canonicalizedBody": "eyJhcGlWZXJzaW9uIjoiMC4wLjIiLCJraW5kIjoiaW50b3RvIiwic3BlYyI6eyJjb250ZW50Ijp7ImVudmVsb3BlIjp7InBheWxvYWRUeXBlIjoiYXBwbGljYXRpb24vdm5kLmluLXRvdG8ranNvbiIsInNpZ25hdHVyZXMiOlt7InB1YmxpY0tleSI6IkxTMHRMUzFDUlVkSlRpQkRSVkpVU1VaSlEwRlVSUzB0TFMwdENrMUpTVU51ZWtORFFXbGhaMEYzU1VKQlowbFZVa3hFZFVGR01rZFViVEF3WkhwT1FXWnFRV05hYnpKcFNuZHJkME5uV1VsTGIxcEplbW93UlVGM1RYY0tUbnBGVmsxQ1RVZEJNVlZGUTJoTlRXTXliRzVqTTFKMlkyMVZkVnBIVmpKTlVqUjNTRUZaUkZaUlVVUkZlRlo2WVZka2VtUkhPWGxhVXpGd1ltNVNiQXBqYlRGc1drZHNhR1JIVlhkSWFHTk9UV3BKZUUxcVFUSk5ha0V4VFhwRk5GZG9ZMDVOYWtsNFRXcEJNazFxUlhkTmVrVTBWMnBCUVUxR2EzZEZkMWxJQ2t0dldrbDZhakJEUVZGWlNVdHZXa2w2YWpCRVFWRmpSRkZuUVVWb1JXVkZPRGRhWXpWdVFtcFpVRXgzVDFNelppdE9RMkZsUlZsUGIxWjVVVVJvWm13S1ZuWkpObVpZUW1FMllYQTViM1psWkZsSVNqTjZXbHBNVkROWmJUVXdZMk52YUdkSVEybHJRVVJPT0V4RVFXbEJaRXRQUTBGVlZYZG5aMFpDVFVFMFJ3cEJNVlZrUkhkRlFpOTNVVVZCZDBsSVowUkJWRUpuVGxaSVUxVkZSRVJCUzBKblozSkNaMFZHUWxGalJFRjZRV1JDWjA1V1NGRTBSVVpuVVZWVlYzWllDa3c0TTNwUFJIWnBVV3hIY0hsVWRYWTJaakJXUTNSQmQwaDNXVVJXVWpCcVFrSm5kMFp2UVZVek9WQndlakZaYTBWYVlqVnhUbXB3UzBaWGFYaHBORmtLV2tRNGQwaDNXVVJXVWpCU1FWRklMMEpDVlhkRk5FVlNXVzVLY0ZsWE5VRmFSMVp2V1ZjeGJHTnBOV3BpTWpCM1RFRlpTMHQzV1VKQ1FVZEVkbnBCUWdwQlVWRmxZVWhTTUdOSVRUWk1lVGx1WVZoU2IyUlhTWFZaTWpsMFRESjRkbG95YkhWTU1qbG9aRmhTYjAxSlIwdENaMjl5UW1kRlJVRmtXalZCWjFGRENrSklkMFZsWjBJMFFVaFpRVE5VTUhkaGMySklSVlJLYWtkU05HTnRWMk16UVhGS1MxaHlhbVZRU3pNdmFEUndlV2RET0hBM2J6UkJRVUZIUlRaVVltWUtTMEZCUVVKQlRVRlNla0pHUVdsQ1VqVnphRTEwVVZKWVVsWXhWVzlCZDBsUVNVMTZSRVpxZVZKcVdGTldlaThyTjJ0eVUxZGhVbXhvUVVsb1FVbFFOZ3AyU25aaVJGUXdaa0ZOTUZCNlUyVkpSMXBxWjNoa0swOXBabmxVV21FMmFWaHRlV3BXVEU5SlRVRnZSME5EY1VkVFRUUTVRa0ZOUkVFeVkwRk5SMUZEQ2sxRWQxZHhha3B1UnpSVVMwNUZZMmR3UzNRMVUwZDNaRmhaWW1GVWQwRXplSEJSVWpSUUsySlJNVE5yWVRsUGVHbEhhM0JxV1ZwRlZWaHpWMGhqTTJFS2MyZEpkMUJyVWt0S2JHZFVha3RrYmxwVFZYRmxaV2hGVXpNMk1VdDNjMjB6Y2xVdmJqbDZla1ZLVTB0dVNuTlFaSFp0UVZSQmFYZHlRMHR3YzB0NWRnb3JWekZqQ2kwdExTMHRSVTVFSUVORlVsUkpSa2xEUVZSRkxTMHRMUzBLIiwic2lnIjoiVFVWVlEwbFJSSEZJZEhCcmF6RmtNSEpOUjB4dFpqTnh
aWFE1YWt4aGJHVXpTMVp1T0ZCdWVYZHdkM1EzYkc0ck9VRkpaMGM1UTBwMmRsVnRlV1Z0YUU1WlNIb3dSR1pLTkhaTlprdHJNVlJOWnl0dE0yaFNNRzFKVTFoS2IzTTkifV19LCJoYXNoIjp7ImFsZ29yaXRobSI6InNoYTI1NiIsInZhbHVlIjoiOTY4NGFhZjk0NTJjYmU1Mzg1NGVkODM2OWRmM2NiZDc2ZjM2YzI0YzI4YjE5MjlkN2JlMDExMzhjNmE0ZDAxZSJ9LCJwYXlsb2FkSGFzaCI6eyJhbGdvcml0aG0iOiJzaGEyNTYiLCJ2YWx1ZSI6ImM2OTQzNTJlMTkzYmM1OGVlNzZlZTc2NTQwNGRkMGQwN2IwNDM4ODVkMWQ5NzAyZTBlNGQxOGQxNzIzOWM1ZjMifX19fQ=="
+        }
+      ],
+      "timestampVerificationData": {
+        "rfc3161Timestamps": []
+      }
+    },
+    "verificationMaterial": {
+      "x509CertificateChain": {
+        "certificates": [
+          {
+            "rawBytes": "MIICnzCCAiagAwIBAgIURLDuAF2GTm00dzNAfjAcZo2iJwkwCgYIKoZIzj0EAwMwNzEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MR4wHAYDVQQDExVzaWdzdG9yZS1pbnRlcm1lZGlhdGUwHhcNMjIxMjA2MjA1MzE4WhcNMjIxMjA2MjEwMzE4WjAAMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEhEeE87Zc5nBjYPLwOS3f+NCaeEYOoVyQDhflVvI6fXBa6ap9ovedYHJ3zZZLT3Ym50ccohgHCikADN8LDAiAdKOCAUUwggFBMA4GA1UdDwEB/wQEAwIHgDATBgNVHSUEDDAKBggrBgEFBQcDAzAdBgNVHQ4EFgQUUWvXL83zODviQlGpyTuv6f0VCtAwHwYDVR0jBBgwFoAU39Ppz1YkEZb5qNjpKFWixi4YZD8wHwYDVR0RAQH/BBUwE4ERYnJpYW5AZGVoYW1lci5jb20wLAYKKwYBBAGDvzABAQQeaHR0cHM6Ly9naXRodWIuY29tL2xvZ2luL29hdXRoMIGKBgorBgEEAdZ5AgQCBHwEegB4AHYA3T0wasbHETJjGR4cmWc3AqJKXrjePK3/h4pygC8p7o4AAAGE6TbfKAAABAMARzBFAiBR5shMtQRXRV1UoAwIPIMzDFjyRjXSVz/+7krSWaRlhAIhAIP6vJvbDT0fAM0PzSeIGZjgxd+OifyTZa6iXmyjVLOIMAoGCCqGSM49BAMDA2cAMGQCMDwWqjJnG4TKNEcgpKt5SGwdXYbaTwA3xpQR4P+bQ13ka9OxiGkpjYZEUXsWHc3asgIwPkRKJlgTjKdnZSUqeehES361Kwsm3rU/n9zzEJSKnJsPdvmATAiwrCKpsKyv+W1c"
+          },
+          {
+            "rawBytes": "MIICGjCCAaGgAwIBAgIUALnViVfnU0brJasmRkHrn/UnfaQwCgYIKoZIzj0EAwMwKjEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MREwDwYDVQQDEwhzaWdzdG9yZTAeFw0yMjA0MTMyMDA2MTVaFw0zMTEwMDUxMzU2NThaMDcxFTATBgNVBAoTDHNpZ3N0b3JlLmRldjEeMBwGA1UEAxMVc2lnc3RvcmUtaW50ZXJtZWRpYXRlMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE8RVS/ysH+NOvuDZyPIZtilgUF9NlarYpAd9HP1vBBH1U5CV77LSS7s0ZiH4nE7Hv7ptS6LvvR/STk798LVgMzLlJ4HeIfF3tHSaexLcYpSASr1kS0N/RgBJz/9jWCiXno3sweTAOBgNVHQ8BAf8EBAMCAQYwEwYDVR0lBAwwCgYIKwYBBQUHAwMwEgYDVR0TAQH/BAgwBgEB/wIBADAdBgNVHQ4EFgQU39Ppz1YkEZb5qNjpKFWixi4YZD8wHwYDVR0jBBgwFoAUWMAeX5FFpWapesyQoZMi0CrFxfowCgYIKoZIzj0EAwMDZwAwZAIwPCsQK4DYiZYDPIaDi5HFKnfxXx6ASSVmERfsynYBiX2X6SJRnZU84/9DZdnFvvxmAjBOt6QpBlc4J/0DxvkTCqpclvziL6BCCPnjdlIB3Pu3BxsPmygUY7Ii2zbdCdliiow="
+          },
+          {
+            "rawBytes": "MIIB9zCCAXygAwIBAgIUALZNAPFdxHPwjeDloDwyYChAO/4wCgYIKoZIzj0EAwMwKjEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MREwDwYDVQQDEwhzaWdzdG9yZTAeFw0yMTEwMDcxMzU2NTlaFw0zMTEwMDUxMzU2NThaMCoxFTATBgNVBAoTDHNpZ3N0b3JlLmRldjERMA8GA1UEAxMIc2lnc3RvcmUwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAT7XeFT4rb3PQGwS4IajtLk3/OlnpgangaBclYpsYBr5i+4ynB07ceb3LP0OIOZdxexX69c5iVuyJRQ+Hz05yi+UF3uBWAlHpiS5sh0+H2GHE7SXrk1EC5m1Tr19L9gg92jYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRYwB5fkUWlZql6zJChkyLQKsXF+jAfBgNVHSMEGDAWgBRYwB5fkUWlZql6zJChkyLQKsXF+jAKBggqhkjOPQQDAwNpADBmAjEAj1nHeXZp+13NWBNa+EDsDP8G1WWg1tCMWP/WHPqpaVo0jhsweNFZgSs0eE7wYI4qAjEA2WB9ot98sIkoF3vZYdd3/VtWB5b9TNMea7Ix/stJ5TfcLLeABLE4BNJOsQ4vnBHJ"
+          }
+        ]
+      }
+    },
+    "dsseEnvelope": {
+      "payload": "ewogICJfdHlwZSI6ICJodHRwczovL2luLXRvdG8uaW8vU3RhdGVtZW50L3YwLjEiLAogICJzdWJqZWN0IjogWwogICAgewogICAgICAibmFtZSI6ICJwa2c6bnBtLyU0MG5wbWNsaS9saWJucG1wdWJsaXNoLXRlc3RAMS4wLjAiLAogICAgICAiZGlnZXN0IjogewogICAgICAgICJzaGE1MTIiOiAiYmE1ZDJlY2ZhNjY3MjRlMWZhZmU0NzM0ODkzMzk0NjRhODZlZjRiNWM2OTcxOGFkOTQ3MzFmMzExYjJkNjA1NzhhODMxMzRiMzM5M2FmOTgyOGFlZjgzZGM1ZWRiMTkxMjZhOGFlZGE1NTI1M2MzMWVjMTkwOWNiZmQ0OGY2NmYiCiAgICAgIH0KICAgIH0KICBdLAogICJwcmVkaWNhdGVUeXBlIjogImh0dHBzOi8vc2xzYS5kZXYvcHJvdmVuYW5jZS92MC4yIiwKICAicHJlZGljYXRlIjogewogICAgImJ1aWxkVHlwZSI6ICJodHRwczovL2dpdGh1Yi5jb20vbnBtL2NsaS9naGFAdjAiLAogICAgImJ1aWxkZXIiOiB7CiAgICAgICJpZCI6ICJodHRwczovL2dpdGh1Yi5jb20vbnBtL2NsaUA5LjEuMyIKICAgIH0sCiAgICAiaW52b2NhdGlvbiI6IHsKICAgICAgImNvbmZpZ1NvdXJjZSI6IHsKICAgICAgICAidXJpIjogImdpdCtodHRwczovL2dpdGh1Yi5jb20vbnBtL2NsaUByZWZzL2hlYWRzL2xhdGVzdCIsCiAgICAgICAgImRpZ2VzdCI6IHsKICAgICAgICAgICJzaGExIjogIjFjZjE3ODA0Yzc2MjVjNWM3OGVjOTY5NmY5ZmVmN2VjYzQ0ZTBjNTgiCiAgICAgICAgfSwKICAgICAgICAiZW50cnlQb2ludCI6ICJwdWJsaXNoIgogICAgICB9LAogICAgICAicGFyYW1ldGVycyI6IHt9LAogICAgICAiZW52aXJvbm1lbnQiOiB7CiAgICAgICAgIkdJVEhVQl9FVkVOVF9OQU1FIjogIndvcmtmbG93X2Rpc3BhdGNoIiwKICAgICAgICAiR0lUSFVCX0pPQiI6ICJwcm9kdWN0aW9uIiwKICAgICAgICAiR0lUSFVCX1JFRiI6ICJyZWZzL2hlYWRzL2xhdGVzdCIsCiAgICAgICAgIkdJVEhVQl9SRUZfVFlQRSI6ICJicmFuY2giLAogICAgICAgICJHSVRIVUJfUkVQT1NJVE9SWSI6ICJucG0vY2xpIiwKICAgICAgICAiR0lUSFVCX1JVTl9BVFRFTVBUIjogIjEiLAogICAgICAgICJHSVRIVUJfUlVOX0lEIjogIjM2MjIzMTQzMDEiLAogICAgICAgICJHSVRIVUJfUlVOX05VTUJFUiI6ICIyNSIsCiAgICAgICAgIkdJVEhVQl9TSEEiOiAiMWNmMTc4MDRjNzYyNWM1Yzc4ZWM5Njk2ZjlmZWY3ZWNjNDRlMGM1OCIsCiAgICAgICAgIkdJVEhVQl9XT1JLRkxPVyI6ICJwdWJsaXNoIiwKICAgICAgICAiSU1BR0VfT1MiOiAidWJ1bnR1MjIiLAogICAgICAgICJJTUFHRV9WRVJTSU9OIjogIjIwMjIxMTI3LjEiLAogICAgICAgICJSVU5ORVJfQVJDSCI6ICJYNjQiLAogICAgICAgICJSVU5ORVJfTkFNRSI6ICJIb3N0ZWQgQWdlbnQiLAogICAgICAgICJSVU5ORVJfT1MiOiAiTGludXgiCiAgICAgIH0KICAgIH0sCiAgICAibWV0YWRhdGEiOiB7CiAgICAgICJidWlsZEludm9jYXRpb25JZCI6ICIzNjIyMzE0MzAxLTEiLAogICAgICAiY29tcGxldGVuZ
XNzIjogewogICAgICAgICJwYXJhbWV0ZXJzIjogZmFsc2UsCiAgICAgICAgImVudmlyb25tZW50IjogZmFsc2UsCiAgICAgICAgIm1hdGVyaWFscyI6IGZhbHNlCiAgICAgIH0sCiAgICAgICJyZXByb2R1Y2libGUiOiBmYWxzZQogICAgfSwKICAgICJtYXRlcmlhbHMiOiBbCiAgICAgIHsKICAgICAgICAidXJpIjogImdpdCtodHRwczovL2dpdGh1Yi5jb20vbnBtL2NsaSIsCiAgICAgICAgImRpZ2VzdCI6IHsKICAgICAgICAgICJzaGExIjogIjFjZjE3ODA0Yzc2MjVjNWM3OGVjOTY5NmY5ZmVmN2VjYzQ0ZTBjNTgiCiAgICAgICAgfQogICAgICB9CiAgICBdCiAgfQp9Cg==",
+      "payloadType": "application/vnd.in-toto+json",
+      "signatures": [
+        {
+          "sig": "MEUCIQDHqtpk1kd0MrGLm3fqet9jaLle3KVn8Pnywwpt7ln+9AIgG9CJvvUmyemhNYHz0fDJ4vMfKk1TMg+3mRh0mSIXosJ=",
+          "keyid": ""
+        }
+      ]
+    }
+  }
+  
\ No newline at end of file
diff --git a/workspaces/libnpmpublish/test/fixtures/multi-subject-provenance-bundle.json b/workspaces/libnpmpublish/test/fixtures/multi-subject-provenance-bundle.json
new file mode 100644
index 0000000000000..a80f47d67b40a
--- /dev/null
+++ b/workspaces/libnpmpublish/test/fixtures/multi-subject-provenance-bundle.json
@@ -0,0 +1,51 @@
+{
+    "mediaType": "application/vnd.dev.sigstore.bundle+json;version=0.1",
+    "verificationData": {
+      "tlogEntries": [
+        {
+          "logIndex": "8539123",
+          "logId": {
+            "keyId": "wNI9atQGlz+VWfO6LRygH4QUfY/8W4RFwiT5i5WRgB0="
+          },
+          "kindVersion": {
+            "kind": "intoto",
+            "version": "0.0.2"
+          },
+          "integratedTime": "1670349898",
+          "inclusionPromise": {
+            "signedEntryTimestamp": "MEYCIQDvnybZlYTHdLpL2j7763TmqB1Cb2GHxXKPP2dBsSODMAIhAPMct25po0Tu7XmAmfvONVia0W5Zbx9qyB5Lb5jQz278"
+          },
+          "canonicalizedBody": "eyJhcGlWZXJzaW9uIjoiMC4wLjIiLCJraW5kIjoiaW50b3RvIiwic3BlYyI6eyJjb250ZW50Ijp7ImVudmVsb3BlIjp7InBheWxvYWRUeXBlIjoiYXBwbGljYXRpb24vdm5kLmluLXRvdG8ranNvbiIsInNpZ25hdHVyZXMiOlt7InB1YmxpY0tleSI6IkxTMHRMUzFDUlVkSlRpQkRSVkpVU1VaSlEwRlVSUzB0TFMwdENrMUpTVU52UkVORFFXbGhaMEYzU1VKQlowbFZTM0p4ZFVkb1FuWklORlpIVjBwQk1qTmlhRVJUZDNBdk1ucEZkME5uV1VsTGIxcEplbW93UlVGM1RYY0tUbnBGVmsxQ1RVZEJNVlZGUTJoTlRXTXliRzVqTTFKMlkyMVZkVnBIVmpKTlVqUjNTRUZaUkZaUlVVUkZlRlo2WVZka2VtUkhPWGxhVXpGd1ltNVNiQXBqYlRGc1drZHNhR1JIVlhkSWFHTk9UV3BKZUUxcVFUSk5WR2QzVGtSVk5GZG9ZMDVOYWtsNFRXcEJNazFVWjNoT1JGVTBWMnBCUVUxR2EzZEZkMWxJQ2t0dldrbDZhakJEUVZGWlNVdHZXa2w2YWpCRVFWRmpSRkZuUVVWa2JtUlFka0ZIU0UweVFYbEdReXRVYzA5WE1FOUpUR2RXWWpSb1IzRnpSMVkxV1RVS2J6bEpOMUl2YWxOclRtUklhRzVZT1ZoWkt6Wk5abkZIZUM5bVpIRjVha2hPUW5ONFkzUkhXbk5aV21SWWJrVllLMHRQUTBGVlZYZG5aMFpDVFVFMFJ3cEJNVlZrUkhkRlFpOTNVVVZCZDBsSVowUkJWRUpuVGxaSVUxVkZSRVJCUzBKblozSkNaMFZHUWxGalJFRjZRV1JDWjA1V1NGRTBSVVpuVVZWNWFqWkNDbkoxVmxZNGFVeHFheTh3S3pSUldreERRV2M1VlhGRmQwaDNXVVJXVWpCcVFrSm5kMFp2UVZVek9WQndlakZaYTBWYVlqVnhUbXB3UzBaWGFYaHBORmtLV2tRNGQwaDNXVVJXVWpCU1FWRklMMEpDVlhkRk5FVlNXVzVLY0ZsWE5VRmFSMVp2V1ZjeGJHTnBOV3BpTWpCM1RFRlpTMHQzV1VKQ1FVZEVkbnBCUWdwQlVWRmxZVWhTTUdOSVRUWk1lVGx1WVZoU2IyUlhTWFZaTWpsMFRESjRkbG95YkhWTU1qbG9aRmhTYjAxSlIwdENaMjl5UW1kRlJVRmtXalZCWjFGRENrSklkMFZsWjBJMFFVaFpRVE5VTUhkaGMySklSVlJLYWtkU05HTnRWMk16UVhGS1MxaHlhbVZRU3pNdmFEUndlV2RET0hBM2J6UkJRVUZIUlRaS2VrTUtUbWRCUVVKQlRVRlNla0pHUVdsQk9FZFVTbmxtTmxob09WZDViVkJ2YTFkVVNqbHBUMDA1TWtOcVZHcHlNbVZHUVROcmQwcG9SazVFVVVsb1FVMTRLd3BHWTAwMVVtTmtWWEJqYzBWSWVVMVphWFUwWVV4YVZYRkpjbW92VnpWbGRYbDNSbnBuY0c1M1RVRnZSME5EY1VkVFRUUTVRa0ZOUkVFeVowRk5SMVZEQ2sxUlJHNXZSSEpVWWtwRlIxcHVValE1V20xbVprbENOek5xU2treGJHdHBWRnBzUnpKc1ZYbDRWV2M0TkdKUVlpdExaRXhRTlZCdVlpdFpTR0pzVFdjS2EzRTBRMDFFUzBGSVlUSnNSblFyUjBaQ2JrbFBOMjlpYkRGdmMwOXpXV2RSVG5ORE9UQlRWSGc1Y0RnMlptRlBOalJPYlhSalQzZDFhRWw0YURCd2N3cHRTMWxMVkhjOVBRb3RMUzB0TFVWT1JDQkRSVkpVU1VaSlEwRlVSUzB0TFMwdENnPT0iLCJzaWciOiJUVVZWUTBsSWNISjBaeXRCU0RKR2VWaHp
kWFkxV0hkeVFTOUthbXd5VW5oTlNWTk5OVFpzV1RjMFNURTFPWGRwUVdsRlFXNVJTbXBwV1RORksyMTZlRTlMTkV4alowTldRMHhGTkRCYWEzQXJiVFZPZVZBNFJFMUtNMlJoVHpBOSJ9XX0sImhhc2giOnsiYWxnb3JpdGhtIjoic2hhMjU2IiwidmFsdWUiOiJkNTM5YjM3NzA1OTFhMGYxZDgwN2IwMjhlYzgwNGI0NjZmNmY4OWJmMGM5ZDc4YTYxMzE1ODdlZWE0MzMxNThjIn0sInBheWxvYWRIYXNoIjp7ImFsZ29yaXRobSI6InNoYTI1NiIsInZhbHVlIjoiNWM4YWJiYTVhZjIxMTRiYTA4NmE0YjU0MjViNjJjOTdkNjExN2ZiYTJhYTdjZDg2NDUxYjY4MjhhYjhiZGRkMiJ9fX19"
+        }
+      ],
+      "timestampVerificationData": {
+        "rfc3161Timestamps": []
+      }
+    },
+    "verificationMaterial": {
+      "x509CertificateChain": {
+        "certificates": [
+          {
+            "rawBytes": "MIICoDCCAiagAwIBAgIUKrquGhBvH4VGWJA23bhDSwp/2zEwCgYIKoZIzj0EAwMwNzEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MR4wHAYDVQQDExVzaWdzdG9yZS1pbnRlcm1lZGlhdGUwHhcNMjIxMjA2MTgwNDU4WhcNMjIxMjA2MTgxNDU4WjAAMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEdndPvAGHM2AyFC+TsOW0OILgVb4hGqsGV5Y5o9I7R/jSkNdHhnX9XY+6MfqGx/fdqyjHNBsxctGZsYZdXnEX+KOCAUUwggFBMA4GA1UdDwEB/wQEAwIHgDATBgNVHSUEDDAKBggrBgEFBQcDAzAdBgNVHQ4EFgQUyj6BruVV8iLjk/0+4QZLCAg9UqEwHwYDVR0jBBgwFoAU39Ppz1YkEZb5qNjpKFWixi4YZD8wHwYDVR0RAQH/BBUwE4ERYnJpYW5AZGVoYW1lci5jb20wLAYKKwYBBAGDvzABAQQeaHR0cHM6Ly9naXRodWIuY29tL2xvZ2luL29hdXRoMIGKBgorBgEEAdZ5AgQCBHwEegB4AHYA3T0wasbHETJjGR4cmWc3AqJKXrjePK3/h4pygC8p7o4AAAGE6JzCNgAABAMARzBFAiA8GTJyf6Xh9WymPokWTJ9iOM92CjTjr2eFA3kwJhFNDQIhAMx+FcM5RcdUpcsEHyMYiu4aLZUqIrj/W5euywFzgpnwMAoGCCqGSM49BAMDA2gAMGUCMQDnoDrTbJEGZnR49ZmffIB73jJI1lkiTZlG2lUyxUg84bPb+KdLP5Pnb+YHblMgkq4CMDKAHa2lFt+GFBnIO7obl1osOsYgQNsC90STx9p86faO64NmtcOwuhIxh0psmKYKTw=="
+          },
+          {
+            "rawBytes": "MIICGjCCAaGgAwIBAgIUALnViVfnU0brJasmRkHrn/UnfaQwCgYIKoZIzj0EAwMwKjEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MREwDwYDVQQDEwhzaWdzdG9yZTAeFw0yMjA0MTMyMDA2MTVaFw0zMTEwMDUxMzU2NThaMDcxFTATBgNVBAoTDHNpZ3N0b3JlLmRldjEeMBwGA1UEAxMVc2lnc3RvcmUtaW50ZXJtZWRpYXRlMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE8RVS/ysH+NOvuDZyPIZtilgUF9NlarYpAd9HP1vBBH1U5CV77LSS7s0ZiH4nE7Hv7ptS6LvvR/STk798LVgMzLlJ4HeIfF3tHSaexLcYpSASr1kS0N/RgBJz/9jWCiXno3sweTAOBgNVHQ8BAf8EBAMCAQYwEwYDVR0lBAwwCgYIKwYBBQUHAwMwEgYDVR0TAQH/BAgwBgEB/wIBADAdBgNVHQ4EFgQU39Ppz1YkEZb5qNjpKFWixi4YZD8wHwYDVR0jBBgwFoAUWMAeX5FFpWapesyQoZMi0CrFxfowCgYIKoZIzj0EAwMDZwAwZAIwPCsQK4DYiZYDPIaDi5HFKnfxXx6ASSVmERfsynYBiX2X6SJRnZU84/9DZdnFvvxmAjBOt6QpBlc4J/0DxvkTCqpclvziL6BCCPnjdlIB3Pu3BxsPmygUY7Ii2zbdCdliiow="
+          },
+          {
+            "rawBytes": "MIIB9zCCAXygAwIBAgIUALZNAPFdxHPwjeDloDwyYChAO/4wCgYIKoZIzj0EAwMwKjEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MREwDwYDVQQDEwhzaWdzdG9yZTAeFw0yMTEwMDcxMzU2NTlaFw0zMTEwMDUxMzU2NThaMCoxFTATBgNVBAoTDHNpZ3N0b3JlLmRldjERMA8GA1UEAxMIc2lnc3RvcmUwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAT7XeFT4rb3PQGwS4IajtLk3/OlnpgangaBclYpsYBr5i+4ynB07ceb3LP0OIOZdxexX69c5iVuyJRQ+Hz05yi+UF3uBWAlHpiS5sh0+H2GHE7SXrk1EC5m1Tr19L9gg92jYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRYwB5fkUWlZql6zJChkyLQKsXF+jAfBgNVHSMEGDAWgBRYwB5fkUWlZql6zJChkyLQKsXF+jAKBggqhkjOPQQDAwNpADBmAjEAj1nHeXZp+13NWBNa+EDsDP8G1WWg1tCMWP/WHPqpaVo0jhsweNFZgSs0eE7wYI4qAjEA2WB9ot98sIkoF3vZYdd3/VtWB5b9TNMea7Ix/stJ5TfcLLeABLE4BNJOsQ4vnBHJ"
+          }
+        ]
+      }
+    },
+    "dsseEnvelope": {
+      "payload": "ewogICJfdHlwZSI6ICJodHRwczovL2luLXRvdG8uaW8vU3RhdGVtZW50L3YwLjEiLAogICJzdWJqZWN0IjogWwogICAgewogICAgICAibmFtZSI6ICJwa2c6bnBtLyU0MG5wbWNsaS9saWJucG1wdWJsaXNoLXRlc3RAMS4wLjAiLAogICAgICAiZGlnZXN0IjogewogICAgICAgICJzaGE1MTIiOiAiZjA5MzZjZTgyYmU5NDdjZDU4ZjExZjhkODA3MDE2NTBmYmY5MjQ0NWFlZTFiZDdlOTlmNGRlNDY1ODdmMTRhZDIzOWExYjIzY2E0YzA5Y2Q3MWNmMDkzMGYwNjBlOTU3MmU5OGMzMzcyZDJjOWY4Y2Y0Zjc2YTkyMzRmZTJkZjciCiAgICAgIH0KICAgIH0sCiAgICB7CiAgICAgICJuYW1lIjogInBrZzpucG0vJTQwbnBtY2xpL2xpYm5wbXB1Ymxpc2gtdGVzdEAxLjAuMSIsCiAgICAgICJkaWdlc3QiOiB7CiAgICAgICAgInNoYTUxMiI6ICJhZjM5YzY4ZWIyOWU3NGRjNThmMTFmOGQ4MDcwMTY1MGZiZjkyNDQ1YWVlMWJkN2U5OWY0ZGU0NjU4N2YxNGFkMjM5YTFiMjNjYTRjMDljZDcxY2YwOTMwZjA2MGU5NTcyZTk4YzMzNzJkMmM5ZjhjZjRmNzZhOTIzNGZlMmRmNyIKICAgICAgfQogICAgfQogIF0sCiAgInByZWRpY2F0ZVR5cGUiOiAiaHR0cHM6Ly9zbHNhLmRldi9wcm92ZW5hbmNlL3YwLjIiLAogICJwcmVkaWNhdGUiOiB7CiAgICAiYnVpbGRUeXBlIjogImh0dHBzOi8vZ2l0aHViLmNvbS9ucG0vY2xpL2doYUB2MCIsCiAgICAiYnVpbGRlciI6IHsKICAgICAgImlkIjogImh0dHBzOi8vZ2l0aHViLmNvbS9ucG0vY2xpQDkuMS4zIgogICAgfSwKICAgICJpbnZvY2F0aW9uIjogewogICAgICAiY29uZmlnU291cmNlIjogewogICAgICAgICJ1cmkiOiAiZ2l0K2h0dHBzOi8vZ2l0aHViLmNvbS9ucG0vY2xpQHJlZnMvaGVhZHMvbGF0ZXN0IiwKICAgICAgICAiZGlnZXN0IjogewogICAgICAgICAgInNoYTEiOiAiMWNmMTc4MDRjNzYyNWM1Yzc4ZWM5Njk2ZjlmZWY3ZWNjNDRlMGM1OCIKICAgICAgICB9LAogICAgICAgICJlbnRyeVBvaW50IjogInB1Ymxpc2giCiAgICAgIH0sCiAgICAgICJwYXJhbWV0ZXJzIjoge30sCiAgICAgICJlbnZpcm9ubWVudCI6IHsKICAgICAgICAiR0lUSFVCX0VWRU5UX05BTUUiOiAid29ya2Zsb3dfZGlzcGF0Y2giLAogICAgICAgICJHSVRIVUJfSk9CIjogInByb2R1Y3Rpb24iLAogICAgICAgICJHSVRIVUJfUkVGIjogInJlZnMvaGVhZHMvbGF0ZXN0IiwKICAgICAgICAiR0lUSFVCX1JFRl9UWVBFIjogImJyYW5jaCIsCiAgICAgICAgIkdJVEhVQl9SRVBPU0lUT1JZIjogIm5wbS9jbGkiLAogICAgICAgICJHSVRIVUJfUlVOX0FUVEVNUFQiOiAiMSIsCiAgICAgICAgIkdJVEhVQl9SVU5fSUQiOiAiMzYyMjMxNDMwMSIsCiAgICAgICAgIkdJVEhVQl9SVU5fTlVNQkVSIjogIjI1IiwKICAgICAgICAiR0lUSFVCX1NIQSI6ICIxY2YxNzgwNGM3NjI1YzVjNzhlYzk2OTZmOWZlZjdlY2M0NGUwYzU4IiwKICAgICAgICAiR0lUSFVCX1dPUktGTE9XIjogInB1Ymxpc2giLAogICAgICAgICJJTUFHR
V9PUyI6ICJ1YnVudHUyMiIsCiAgICAgICAgIklNQUdFX1ZFUlNJT04iOiAiMjAyMjExMjcuMSIsCiAgICAgICAgIlJVTk5FUl9BUkNIIjogIlg2NCIsCiAgICAgICAgIlJVTk5FUl9OQU1FIjogIkhvc3RlZCBBZ2VudCIsCiAgICAgICAgIlJVTk5FUl9PUyI6ICJMaW51eCIKICAgICAgfQogICAgfSwKICAgICJtZXRhZGF0YSI6IHsKICAgICAgImJ1aWxkSW52b2NhdGlvbklkIjogIjM2MjIzMTQzMDEtMSIsCiAgICAgICJjb21wbGV0ZW5lc3MiOiB7CiAgICAgICAgInBhcmFtZXRlcnMiOiBmYWxzZSwKICAgICAgICAiZW52aXJvbm1lbnQiOiBmYWxzZSwKICAgICAgICAibWF0ZXJpYWxzIjogZmFsc2UKICAgICAgfSwKICAgICAgInJlcHJvZHVjaWJsZSI6IGZhbHNlCiAgICB9LAogICAgIm1hdGVyaWFscyI6IFsKICAgICAgewogICAgICAgICJ1cmkiOiAiZ2l0K2h0dHBzOi8vZ2l0aHViLmNvbS9ucG0vY2xpIiwKICAgICAgICAiZGlnZXN0IjogewogICAgICAgICAgInNoYTEiOiAiMWNmMTc4MDRjNzYyNWM1Yzc4ZWM5Njk2ZjlmZWY3ZWNjNDRlMGM1OCIKICAgICAgICB9CiAgICAgIH0KICAgIF0KICB9Cn0K",
+      "payloadType": "application/vnd.in-toto+json",
+      "signatures": [
+        {
+          "sig": "MEUCIHprtg+AH2FyXsuv5XwrA/Jjl2RxMISM56lY74I159wiAiEAnQJjiY3E+mzxOK4LcgCVCLE40Zkp+m5NyP8DMJ3daO0=",
+          "keyid": ""
+        }
+      ]
+    }
+  }
+  
\ No newline at end of file
diff --git a/workspaces/libnpmpublish/test/fixtures/no-provenance-envelope-bundle.json b/workspaces/libnpmpublish/test/fixtures/no-provenance-envelope-bundle.json
new file mode 100644
index 0000000000000..5aa183cd7897b
--- /dev/null
+++ b/workspaces/libnpmpublish/test/fixtures/no-provenance-envelope-bundle.json
@@ -0,0 +1,41 @@
+{
+    "mediaType": "application/vnd.dev.sigstore.bundle+json;version=0.1",
+    "verificationData": {
+      "tlogEntries": [
+        {
+          "logIndex": "8538105",
+          "logId": {
+            "keyId": "wNI9atQGlz+VWfO6LRygH4QUfY/8W4RFwiT5i5WRgB0="
+          },
+          "kindVersion": {
+            "kind": "intoto",
+            "version": "0.0.2"
+          },
+          "integratedTime": "1670348537",
+          "inclusionPromise": {
+            "signedEntryTimestamp": "MEYCIQC+8VDaS3r4BEfK2BOvV2Mu6cHLwuNMNk0xM/3Fodsv+wIhANo2BDNBFjrwfZfqFPNLoUhrNX65Ymw0SuWVmiUC06U0"
+          },
+          "canonicalizedBody": "eyJhcGlWZXJzaW9uIjoiMC4wLjIiLCJraW5kIjoiaW50b3RvIiwic3BlYyI6eyJjb250ZW50Ijp7ImVudmVsb3BlIjp7InBheWxvYWRUeXBlIjoiYXBwbGljYXRpb24vdm5kLmluLXRvdG8ranNvbiIsInNpZ25hdHVyZXMiOlt7InB1YmxpY0tleSI6IkxTMHRMUzFDUlVkSlRpQkRSVkpVU1VaSlEwRlVSUzB0TFMwdENrMUpTVU51ZWtORFFXbGhaMEYzU1VKQlowbFZVR1JvYlVKckwwVXdWVkZzU0RKc1oycG5TekpJWTBGYWJtNHdkME5uV1VsTGIxcEplbW93UlVGM1RYY0tUbnBGVmsxQ1RVZEJNVlZGUTJoTlRXTXliRzVqTTFKMlkyMVZkVnBIVmpKTlVqUjNTRUZaUkZaUlVVUkZlRlo2WVZka2VtUkhPWGxhVXpGd1ltNVNiQXBqYlRGc1drZHNhR1JIVlhkSWFHTk9UV3BKZUUxcVFUSk5WR013VFdwRk1sZG9ZMDVOYWtsNFRXcEJNazFVWXpGTmFrVXlWMnBCUVUxR2EzZEZkMWxJQ2t0dldrbDZhakJEUVZGWlNVdHZXa2w2YWpCRVFWRmpSRkZuUVVWaUswVm9lV0pJZWs4dk5IVm9iV3hvWVZwMVdpdDFOWHBYZEdkcEwyazNOakJ2YWpZS0swZDNabWhqTTBabVpIbHNTMDVRTTFVNVdHWjZhbWxYZUU1RU1ERkxVV1oyWVhscFVrWkdTV3hDZFROUWIwOWlOSEZQUTBGVlZYZG5aMFpDVFVFMFJ3cEJNVlZrUkhkRlFpOTNVVVZCZDBsSVowUkJWRUpuVGxaSVUxVkZSRVJCUzBKblozSkNaMFZHUWxGalJFRjZRV1JDWjA1V1NGRTBSVVpuVVZWRmVrRnJDa0l4YzFvNGMyVndLMnRpWmpGT1ZGRlhiV3c1T0VaSmQwaDNXVVJXVWpCcVFrSm5kMFp2UVZVek9WQndlakZaYTBWYVlqVnhUbXB3UzBaWGFYaHBORmtLV2tRNGQwaDNXVVJXVWpCU1FWRklMMEpDVlhkRk5FVlNXVzVLY0ZsWE5VRmFSMVp2V1ZjeGJHTnBOV3BpTWpCM1RFRlpTMHQzV1VKQ1FVZEVkbnBCUWdwQlVWRmxZVWhTTUdOSVRUWk1lVGx1WVZoU2IyUlhTWFZaTWpsMFRESjRkbG95YkhWTU1qbG9aRmhTYjAxSlIwdENaMjl5UW1kRlJVRmtXalZCWjFGRENrSklkMFZsWjBJMFFVaFpRVE5VTUhkaGMySklSVlJLYWtkU05HTnRWMk16UVhGS1MxaHlhbVZRU3pNdmFEUndlV2RET0hBM2J6UkJRVUZIUlRaSlpqY0tXVkZCUVVKQlRVRlNla0pHUVdsRlFXMUNSR0V2WkVscmFIWjNVa28zV2t0cFZIRjVLelZqY0hFMWRFMXFiMDVvTm5SU2J6SklRVE53UTBsRFNVZFNkZ3AwVWk5bWJWZ3lVbTFqY0VkR2RFUkNVa3M0VlRKYVMxazJhbE5PU21WSlMydzJLMDAzVDBObVRVRnZSME5EY1VkVFRUUTVRa0ZOUkVFeVkwRk5SMUZEQ2sxR1MzbE5RbTFGY2tWRFp5czBXRmxJUWtsdWJFUjRZMDB3ZDFFdmVqUTBjMGdyZVdwTmNrbExNV1JWV1hkU1lUVnZhM3BTTURVeWFUQlpaMEZ0U0VnS09GRkpkMDlJTkV0RlJYUklWSGxyVjNOUU1ITk1UMkV3V2pkbk1rbE1TWGRXZUhac1ltMHZhVUUwTjJRNFFYZG5UMHMxWkRSS05XRkRVbGRoUlZodVFncFFjMWN6Q2kwdExTMHRSVTVFSUVORlVsUkpSa2xEUVZSRkxTMHRMUzBLIiwic2lnIjoiVFVWVlEwbENPVU55VGpaSVRuVXdOMUpsUnpWVU9ERm1
WMmdyU1ZCMVN6ZEVka0YxYkdveE5YWm9aVkI0UTJWWVFXbEZRVGhLWkZnMWNFRnJjMjR2VW1rclVsUnVRa2R1VW5ob1ZpdFFUaTltTUZZeWRtOHZaM05EUjJVM0t6UTkifV19LCJoYXNoIjp7ImFsZ29yaXRobSI6InNoYTI1NiIsInZhbHVlIjoiZGVhNzQyNzQ3ZTZjMzFmNmU4MTAzMjM1YjA0MTc1ODFhODY4ZDliNjQ2YjYyMmE3OGNmNzE3OTE2ZTA2ZDkxNiJ9LCJwYXlsb2FkSGFzaCI6eyJhbGdvcml0aG0iOiJzaGEyNTYiLCJ2YWx1ZSI6IjczOWJkMWQ0MzQ4NmRmNTdkNDY3MGM0NDNiNjViOWUxYWQ5MTg0MTFmYjJjMjgwOWVkM2RhZDQ4ZGIwN2Q5NDAifX19fQ=="
+        }
+      ],
+      "timestampVerificationData": {
+        "rfc3161Timestamps": []
+      }
+    },
+    "verificationMaterial": {
+      "x509CertificateChain": {
+        "certificates": [
+          {
+            "rawBytes": "MIICnzCCAiagAwIBAgIUPdhmBk/E0UQlH2lgjgK2HcAZnn0wCgYIKoZIzj0EAwMwNzEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MR4wHAYDVQQDExVzaWdzdG9yZS1pbnRlcm1lZGlhdGUwHhcNMjIxMjA2MTc0MjE2WhcNMjIxMjA2MTc1MjE2WjAAMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEb+EhybHzO/4uhmlhaZuZ+u5zWtgi/i760oj6+Gwfhc3FfdylKNP3U9XfzjiWxND01KQfvayiRFFIlBu3PoOb4qOCAUUwggFBMA4GA1UdDwEB/wQEAwIHgDATBgNVHSUEDDAKBggrBgEFBQcDAzAdBgNVHQ4EFgQUEzAkB1sZ8sep+kbf1NTQWml98FIwHwYDVR0jBBgwFoAU39Ppz1YkEZb5qNjpKFWixi4YZD8wHwYDVR0RAQH/BBUwE4ERYnJpYW5AZGVoYW1lci5jb20wLAYKKwYBBAGDvzABAQQeaHR0cHM6Ly9naXRodWIuY29tL2xvZ2luL29hdXRoMIGKBgorBgEEAdZ5AgQCBHwEegB4AHYA3T0wasbHETJjGR4cmWc3AqJKXrjePK3/h4pygC8p7o4AAAGE6If7YQAABAMARzBFAiEAmBDa/dIkhvwRJ7ZKiTqy+5cpq5tMjoNh6tRo2HA3pCICIGRvtR/fmX2RmcpGFtDBRK8U2ZKY6jSNJeIKl6+M7OCfMAoGCCqGSM49BAMDA2cAMGQCMFKyMBmErECg+4XYHBInlDxcM0wQ/z44sH+yjMrIK1dUYwRa5okzR052i0YgAmHH8QIwOH4KEEtHTykWsP0sLOa0Z7g2ILIwVxvlbm/iA47d8AwgOK5d4J5aCRWaEXnBPsW3"
+          },
+          {
+            "rawBytes": "MIICGjCCAaGgAwIBAgIUALnViVfnU0brJasmRkHrn/UnfaQwCgYIKoZIzj0EAwMwKjEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MREwDwYDVQQDEwhzaWdzdG9yZTAeFw0yMjA0MTMyMDA2MTVaFw0zMTEwMDUxMzU2NThaMDcxFTATBgNVBAoTDHNpZ3N0b3JlLmRldjEeMBwGA1UEAxMVc2lnc3RvcmUtaW50ZXJtZWRpYXRlMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE8RVS/ysH+NOvuDZyPIZtilgUF9NlarYpAd9HP1vBBH1U5CV77LSS7s0ZiH4nE7Hv7ptS6LvvR/STk798LVgMzLlJ4HeIfF3tHSaexLcYpSASr1kS0N/RgBJz/9jWCiXno3sweTAOBgNVHQ8BAf8EBAMCAQYwEwYDVR0lBAwwCgYIKwYBBQUHAwMwEgYDVR0TAQH/BAgwBgEB/wIBADAdBgNVHQ4EFgQU39Ppz1YkEZb5qNjpKFWixi4YZD8wHwYDVR0jBBgwFoAUWMAeX5FFpWapesyQoZMi0CrFxfowCgYIKoZIzj0EAwMDZwAwZAIwPCsQK4DYiZYDPIaDi5HFKnfxXx6ASSVmERfsynYBiX2X6SJRnZU84/9DZdnFvvxmAjBOt6QpBlc4J/0DxvkTCqpclvziL6BCCPnjdlIB3Pu3BxsPmygUY7Ii2zbdCdliiow="
+          },
+          {
+            "rawBytes": "MIIB9zCCAXygAwIBAgIUALZNAPFdxHPwjeDloDwyYChAO/4wCgYIKoZIzj0EAwMwKjEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MREwDwYDVQQDEwhzaWdzdG9yZTAeFw0yMTEwMDcxMzU2NTlaFw0zMTEwMDUxMzU2NThaMCoxFTATBgNVBAoTDHNpZ3N0b3JlLmRldjERMA8GA1UEAxMIc2lnc3RvcmUwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAT7XeFT4rb3PQGwS4IajtLk3/OlnpgangaBclYpsYBr5i+4ynB07ceb3LP0OIOZdxexX69c5iVuyJRQ+Hz05yi+UF3uBWAlHpiS5sh0+H2GHE7SXrk1EC5m1Tr19L9gg92jYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRYwB5fkUWlZql6zJChkyLQKsXF+jAfBgNVHSMEGDAWgBRYwB5fkUWlZql6zJChkyLQKsXF+jAKBggqhkjOPQQDAwNpADBmAjEAj1nHeXZp+13NWBNa+EDsDP8G1WWg1tCMWP/WHPqpaVo0jhsweNFZgSs0eE7wYI4qAjEA2WB9ot98sIkoF3vZYdd3/VtWB5b9TNMea7Ix/stJ5TfcLLeABLE4BNJOsQ4vnBHJ"
+          }
+        ]
+      }
+    }
+  }
+  
\ No newline at end of file
diff --git a/workspaces/libnpmpublish/test/fixtures/no-provenance-subject-bundle.json b/workspaces/libnpmpublish/test/fixtures/no-provenance-subject-bundle.json
new file mode 100644
index 0000000000000..3709c4cca8682
--- /dev/null
+++ b/workspaces/libnpmpublish/test/fixtures/no-provenance-subject-bundle.json
@@ -0,0 +1,51 @@
+{
+    "mediaType": "application/vnd.dev.sigstore.bundle+json;version=0.1",
+    "verificationData": {
+      "tlogEntries": [
+        {
+          "logIndex": "8539003",
+          "logId": {
+            "keyId": "wNI9atQGlz+VWfO6LRygH4QUfY/8W4RFwiT5i5WRgB0="
+          },
+          "kindVersion": {
+            "kind": "intoto",
+            "version": "0.0.2"
+          },
+          "integratedTime": "1670349725",
+          "inclusionPromise": {
+            "signedEntryTimestamp": "MEQCIHVPdHXiEoDPrA+DUibJaXJgD3XpPbKGPoXDFG2HoqxSAiBIaIHumdQwjH4ca1L8dWDXTm24KWx4LOhV6Kbjcd+Wyg=="
+          },
+          "canonicalizedBody": "eyJhcGlWZXJzaW9uIjoiMC4wLjIiLCJraW5kIjoiaW50b3RvIiwic3BlYyI6eyJjb250ZW50Ijp7ImVudmVsb3BlIjp7InBheWxvYWRUeXBlIjoiYXBwbGljYXRpb24vdm5kLmluLXRvdG8ranNvbiIsInNpZ25hdHVyZXMiOlt7InB1YmxpY0tleSI6IkxTMHRMUzFDUlVkSlRpQkRSVkpVU1VaSlEwRlVSUzB0TFMwdENrMUpTVU52VkVORFFXbGhaMEYzU1VKQlowbFZSa000WVZST2JGTTRVRXB5TVV0TGJtSldiRkpuVkhWalpVZEpkME5uV1VsTGIxcEplbW93UlVGM1RYY0tUbnBGVmsxQ1RVZEJNVlZGUTJoTlRXTXliRzVqTTFKMlkyMVZkVnBIVmpKTlVqUjNTRUZaUkZaUlVVUkZlRlo2WVZka2VtUkhPWGxhVXpGd1ltNVNiQXBqYlRGc1drZHNhR1JIVlhkSWFHTk9UV3BKZUUxcVFUSk5WR2QzVFdwQk1WZG9ZMDVOYWtsNFRXcEJNazFVWjNoTmFrRXhWMnBCUVUxR2EzZEZkMWxJQ2t0dldrbDZhakJEUVZGWlNVdHZXa2w2YWpCRVFWRmpSRkZuUVVWdmVEZ3piVXh6ZWtSVFpYbENiWEJ6TTFFemRYUXZVUzlzYlhabVN6RlZiMHBWVnpVS1JqaEhjak5OVFRFd1RWQkhlblZMTmtsWVdHNUdkVXByY3pKdWRpczVXRGxhZFVVNGRrZGhRMVJTVkd3d2FUTndLM0ZQUTBGVlZYZG5aMFpDVFVFMFJ3cEJNVlZrUkhkRlFpOTNVVVZCZDBsSVowUkJWRUpuVGxaSVUxVkZSRVJCUzBKblozSkNaMFZHUWxGalJFRjZRV1JDWjA1V1NGRTBSVVpuVVZWSGFWZEtDbXhXVEZvNWVVMTVSa1pWVXpCaVlWazRLMGRGVVhKemQwaDNXVVJXVWpCcVFrSm5kMFp2UVZVek9WQndlakZaYTBWYVlqVnhUbXB3UzBaWGFYaHBORmtLV2tRNGQwaDNXVVJXVWpCU1FWRklMMEpDVlhkRk5FVlNXVzVLY0ZsWE5VRmFSMVp2V1ZjeGJHTnBOV3BpTWpCM1RFRlpTMHQzV1VKQ1FVZEVkbnBCUWdwQlVWRmxZVWhTTUdOSVRUWk1lVGx1WVZoU2IyUlhTWFZaTWpsMFRESjRkbG95YkhWTU1qbG9aRmhTYjAxSlIwdENaMjl5UW1kRlJVRmtXalZCWjFGRENrSklkMFZsWjBJMFFVaFpRVE5VTUhkaGMySklSVlJLYWtkU05HTnRWMk16UVhGS1MxaHlhbVZRU3pNdmFEUndlV2RET0hBM2J6UkJRVUZIUlRaS2IyUUtORUZCUVVKQlRVRlNla0pHUVdsQmJESldhbTFpV0Rsekx6VllUemMzV1hOb2QxUnRRV1p5THpsMFFWQkhTV2hKYms0emRtMUpXa1pqVVVsb1FVbGxOQW94Wm1WSFVYcGFkRk5pYWtkRVVuTnBZMGhTTkRGSFlWRldjWEJ4VEZOMlVUZEJLelZHUzJaYVRVRnZSME5EY1VkVFRUUTVRa0ZOUkVFeWEwRk5SMWxEQ2sxUlEzaERTMWhDUW1sek9WcFZiVzB2UzB0QldGWmpjRU5KTWpWbGNGa3lXSGMxYkdGT0sxbHdVMWw0ZDFCYVprY3ZRMmRXTlhwdFQySXZPRFZsVTJjS2JEQlJRMDFSUkcxNU5HUkJVMFpXWmxCdGNWbENkMnhoTkVOTlpubG1WRXQySzNwUlVVdE1TelkyYVhwVlMwZFVNMjlEV21KRmNFRlNjaXRSVGxGbE9RcDRTWFZ6YVZKelBRb3RMUzB0TFVWT1JDQkRSVkpVU1VaSlEwRlVSUzB0TFMwdENnPT0iLCJzaWciOiJUVVZWUTBsUlJFRm5iRVp0Um1OTFEyZzF
VRUZIYmt0dUszVm5PRFpCZUc1MVNqRXZNQ3RWVUVaNlkwUndRamxyUW1kSlowdERLM3BzTVRNNVNtUkpla3ROV2prcmIzbHRja0ZMTTNwamNYSkdTSGRKYkZkb1pWZDZaa2RwTTJzOSJ9XX0sImhhc2giOnsiYWxnb3JpdGhtIjoic2hhMjU2IiwidmFsdWUiOiJlNTkxM2M1YWE4ZDdkMmYxNGM2YmM4NjhmOWEzMzg0MzJlYWUwMTE2NzU0NDQ3NjM2YWE0YmZkMDFiNTNmMmU0In0sInBheWxvYWRIYXNoIjp7ImFsZ29yaXRobSI6InNoYTI1NiIsInZhbHVlIjoiOGQ0NGU1MWM5ZWY4NTYxZTEwYzlmYzliOGRiZTBkYjg0YzczMjc5MmQzMDQwNGVkODFmNDFhYjQ3ZTQ2Zjg2NiJ9fX19"
+        }
+      ],
+      "timestampVerificationData": {
+        "rfc3161Timestamps": []
+      }
+    },
+    "verificationMaterial": {
+      "x509CertificateChain": {
+        "certificates": [
+          {
+            "rawBytes": "MIICoTCCAiagAwIBAgIUFC8aTNlS8PJr1KKnbVlRgTuceGIwCgYIKoZIzj0EAwMwNzEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MR4wHAYDVQQDExVzaWdzdG9yZS1pbnRlcm1lZGlhdGUwHhcNMjIxMjA2MTgwMjA1WhcNMjIxMjA2MTgxMjA1WjAAMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEox83mLszDSeyBmps3Q3ut/Q/lmvfK1UoJUW5F8Gr3MM10MPGzuK6IXXnFuJks2nv+9X9ZuE8vGaCTRTl0i3p+qOCAUUwggFBMA4GA1UdDwEB/wQEAwIHgDATBgNVHSUEDDAKBggrBgEFBQcDAzAdBgNVHQ4EFgQUGiWJlVLZ9yMyFFUS0baY8+GEQrswHwYDVR0jBBgwFoAU39Ppz1YkEZb5qNjpKFWixi4YZD8wHwYDVR0RAQH/BBUwE4ERYnJpYW5AZGVoYW1lci5jb20wLAYKKwYBBAGDvzABAQQeaHR0cHM6Ly9naXRodWIuY29tL2xvZ2luL29hdXRoMIGKBgorBgEEAdZ5AgQCBHwEegB4AHYA3T0wasbHETJjGR4cmWc3AqJKXrjePK3/h4pygC8p7o4AAAGE6Jod4AAABAMARzBFAiAl2VjmbX9s/5XO77YshwTmAfr/9tAPGIhInN3vmIZFcQIhAIe41feGQzZtSbjGDRsicHR41GaQVqpqLSvQ7A+5FKfZMAoGCCqGSM49BAMDA2kAMGYCMQCxCKXBBis9ZUmm/KKAXVcpCI25epY2Xw5laN+YpSYxwPZfG/CgV5zmOb/85eSgl0QCMQDmy4dASFVfPmqYBwla4CMfyfTKv+zQQKLK66izUKGT3oCZbEpARr+QNQe9xIusiRs="
+          },
+          {
+            "rawBytes": "MIICGjCCAaGgAwIBAgIUALnViVfnU0brJasmRkHrn/UnfaQwCgYIKoZIzj0EAwMwKjEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MREwDwYDVQQDEwhzaWdzdG9yZTAeFw0yMjA0MTMyMDA2MTVaFw0zMTEwMDUxMzU2NThaMDcxFTATBgNVBAoTDHNpZ3N0b3JlLmRldjEeMBwGA1UEAxMVc2lnc3RvcmUtaW50ZXJtZWRpYXRlMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE8RVS/ysH+NOvuDZyPIZtilgUF9NlarYpAd9HP1vBBH1U5CV77LSS7s0ZiH4nE7Hv7ptS6LvvR/STk798LVgMzLlJ4HeIfF3tHSaexLcYpSASr1kS0N/RgBJz/9jWCiXno3sweTAOBgNVHQ8BAf8EBAMCAQYwEwYDVR0lBAwwCgYIKwYBBQUHAwMwEgYDVR0TAQH/BAgwBgEB/wIBADAdBgNVHQ4EFgQU39Ppz1YkEZb5qNjpKFWixi4YZD8wHwYDVR0jBBgwFoAUWMAeX5FFpWapesyQoZMi0CrFxfowCgYIKoZIzj0EAwMDZwAwZAIwPCsQK4DYiZYDPIaDi5HFKnfxXx6ASSVmERfsynYBiX2X6SJRnZU84/9DZdnFvvxmAjBOt6QpBlc4J/0DxvkTCqpclvziL6BCCPnjdlIB3Pu3BxsPmygUY7Ii2zbdCdliiow="
+          },
+          {
+            "rawBytes": "MIIB9zCCAXygAwIBAgIUALZNAPFdxHPwjeDloDwyYChAO/4wCgYIKoZIzj0EAwMwKjEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MREwDwYDVQQDEwhzaWdzdG9yZTAeFw0yMTEwMDcxMzU2NTlaFw0zMTEwMDUxMzU2NThaMCoxFTATBgNVBAoTDHNpZ3N0b3JlLmRldjERMA8GA1UEAxMIc2lnc3RvcmUwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAT7XeFT4rb3PQGwS4IajtLk3/OlnpgangaBclYpsYBr5i+4ynB07ceb3LP0OIOZdxexX69c5iVuyJRQ+Hz05yi+UF3uBWAlHpiS5sh0+H2GHE7SXrk1EC5m1Tr19L9gg92jYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRYwB5fkUWlZql6zJChkyLQKsXF+jAfBgNVHSMEGDAWgBRYwB5fkUWlZql6zJChkyLQKsXF+jAKBggqhkjOPQQDAwNpADBmAjEAj1nHeXZp+13NWBNa+EDsDP8G1WWg1tCMWP/WHPqpaVo0jhsweNFZgSs0eE7wYI4qAjEA2WB9ot98sIkoF3vZYdd3/VtWB5b9TNMea7Ix/stJ5TfcLLeABLE4BNJOsQ4vnBHJ"
+          }
+        ]
+      }
+    },
+    "dsseEnvelope": {
+      "payload": "ewogICJfdHlwZSI6ICJodHRwczovL2luLXRvdG8uaW8vU3RhdGVtZW50L3YwLjEiLAogICJwcmVkaWNhdGVUeXBlIjogImh0dHBzOi8vc2xzYS5kZXYvcHJvdmVuYW5jZS92MC4yIiwKICAicHJlZGljYXRlIjogewogICAgImJ1aWxkVHlwZSI6ICJodHRwczovL2dpdGh1Yi5jb20vbnBtL2NsaS9naGFAdjAiLAogICAgImJ1aWxkZXIiOiB7CiAgICAgICJpZCI6ICJodHRwczovL2dpdGh1Yi5jb20vbnBtL2NsaUA5LjEuMyIKICAgIH0sCiAgICAiaW52b2NhdGlvbiI6IHsKICAgICAgImNvbmZpZ1NvdXJjZSI6IHsKICAgICAgICAidXJpIjogImdpdCtodHRwczovL2dpdGh1Yi5jb20vbnBtL2NsaUByZWZzL2hlYWRzL2xhdGVzdCIsCiAgICAgICAgImRpZ2VzdCI6IHsKICAgICAgICAgICJzaGExIjogIjFjZjE3ODA0Yzc2MjVjNWM3OGVjOTY5NmY5ZmVmN2VjYzQ0ZTBjNTgiCiAgICAgICAgfSwKICAgICAgICAiZW50cnlQb2ludCI6ICJwdWJsaXNoIgogICAgICB9LAogICAgICAicGFyYW1ldGVycyI6IHt9LAogICAgICAiZW52aXJvbm1lbnQiOiB7CiAgICAgICAgIkdJVEhVQl9FVkVOVF9OQU1FIjogIndvcmtmbG93X2Rpc3BhdGNoIiwKICAgICAgICAiR0lUSFVCX0pPQiI6ICJwcm9kdWN0aW9uIiwKICAgICAgICAiR0lUSFVCX1JFRiI6ICJyZWZzL2hlYWRzL2xhdGVzdCIsCiAgICAgICAgIkdJVEhVQl9SRUZfVFlQRSI6ICJicmFuY2giLAogICAgICAgICJHSVRIVUJfUkVQT1NJVE9SWSI6ICJucG0vY2xpIiwKICAgICAgICAiR0lUSFVCX1JVTl9BVFRFTVBUIjogIjEiLAogICAgICAgICJHSVRIVUJfUlVOX0lEIjogIjM2MjIzMTQzMDEiLAogICAgICAgICJHSVRIVUJfUlVOX05VTUJFUiI6ICIyNSIsCiAgICAgICAgIkdJVEhVQl9TSEEiOiAiMWNmMTc4MDRjNzYyNWM1Yzc4ZWM5Njk2ZjlmZWY3ZWNjNDRlMGM1OCIsCiAgICAgICAgIkdJVEhVQl9XT1JLRkxPVyI6ICJwdWJsaXNoIiwKICAgICAgICAiSU1BR0VfT1MiOiAidWJ1bnR1MjIiLAogICAgICAgICJJTUFHRV9WRVJTSU9OIjogIjIwMjIxMTI3LjEiLAogICAgICAgICJSVU5ORVJfQVJDSCI6ICJYNjQiLAogICAgICAgICJSVU5ORVJfTkFNRSI6ICJIb3N0ZWQgQWdlbnQiLAogICAgICAgICJSVU5ORVJfT1MiOiAiTGludXgiCiAgICAgIH0KICAgIH0sCiAgICAibWV0YWRhdGEiOiB7CiAgICAgICJidWlsZEludm9jYXRpb25JZCI6ICIzNjIyMzE0MzAxLTEiLAogICAgICAiY29tcGxldGVuZXNzIjogewogICAgICAgICJwYXJhbWV0ZXJzIjogZmFsc2UsCiAgICAgICAgImVudmlyb25tZW50IjogZmFsc2UsCiAgICAgICAgIm1hdGVyaWFscyI6IGZhbHNlCiAgICAgIH0sCiAgICAgICJyZXByb2R1Y2libGUiOiBmYWxzZQogICAgfSwKICAgICJtYXRlcmlhbHMiOiBbCiAgICAgIHsKICAgICAgICAidXJpIjogImdpdCtodHRwczovL2dpdGh1Yi5jb20vbnBtL2NsaSIsCiAgICAgICAgImRpZ2VzdCI6IHsKICAgICAgICAgICJzaGExIjogIjFjZjE3ODA0Yzc2MjVjN
WM3OGVjOTY5NmY5ZmVmN2VjYzQ0ZTBjNTgiCiAgICAgICAgfQogICAgICB9CiAgICBdCiAgfQp9Cg==",
+      "payloadType": "application/vnd.in-toto+json",
+      "signatures": [
+        {
+          "sig": "MEUCIQDAglFmFcKCh5PAGnKn+ug86AxnuJ1/0+UPFzcDpB9kBgIgKC+zl139JdIzKMZ9+oymrAK3zcqrFHwIlWheWzfGi3k=",
+          "keyid": ""
+        }
+      ]
+    }
+  }
+  
\ No newline at end of file
diff --git a/workspaces/libnpmpublish/test/fixtures/valid-bundle.json b/workspaces/libnpmpublish/test/fixtures/valid-bundle.json
new file mode 100644
index 0000000000000..134c95387c481
--- /dev/null
+++ b/workspaces/libnpmpublish/test/fixtures/valid-bundle.json
@@ -0,0 +1,50 @@
+{
+  "mediaType": "application/vnd.dev.sigstore.bundle+json;version=0.1",
+  "verificationData": {
+    "tlogEntries": [
+      {
+        "logIndex": "8546868",
+        "logId": {
+          "keyId": "wNI9atQGlz+VWfO6LRygH4QUfY/8W4RFwiT5i5WRgB0="
+        },
+        "kindVersion": {
+          "kind": "intoto",
+          "version": "0.0.2"
+        },
+        "integratedTime": "1670359999",
+        "inclusionPromise": {
+          "signedEntryTimestamp": "MEQCIFjNU2nXGfYqtbauWbMIhLQ0YqjVZTk9xBULR0NGc8uRAiBmBO0GvpYiqeFfI7Uq41unOBGKKyJaTSS/GqHXh1urGg=="
+        },
+        "canonicalizedBody": "eyJhcGlWZXJzaW9uIjoiMC4wLjIiLCJraW5kIjoiaW50b3RvIiwic3BlYyI6eyJjb250ZW50Ijp7ImVudmVsb3BlIjp7InBheWxvYWRUeXBlIjoiYXBwbGljYXRpb24vdm5kLmluLXRvdG8ranNvbiIsInNpZ25hdHVyZXMiOlt7InB1YmxpY0tleSI6IkxTMHRMUzFDUlVkSlRpQkRSVkpVU1VaSlEwRlVSUzB0TFMwdENrMUpTVU51ZWtORFFXbGhaMEYzU1VKQlowbFZVa3hFZFVGR01rZFViVEF3WkhwT1FXWnFRV05hYnpKcFNuZHJkME5uV1VsTGIxcEplbW93UlVGM1RYY0tUbnBGVmsxQ1RVZEJNVlZGUTJoTlRXTXliRzVqTTFKMlkyMVZkVnBIVmpKTlVqUjNTRUZaUkZaUlVVUkZlRlo2WVZka2VtUkhPWGxhVXpGd1ltNVNiQXBqYlRGc1drZHNhR1JIVlhkSWFHTk9UV3BKZUUxcVFUSk5ha0V4VFhwRk5GZG9ZMDVOYWtsNFRXcEJNazFxUlhkTmVrVTBWMnBCUVUxR2EzZEZkMWxJQ2t0dldrbDZhakJEUVZGWlNVdHZXa2w2YWpCRVFWRmpSRkZuUVVWb1JXVkZPRGRhWXpWdVFtcFpVRXgzVDFNelppdE9RMkZsUlZsUGIxWjVVVVJvWm13S1ZuWkpObVpZUW1FMllYQTViM1psWkZsSVNqTjZXbHBNVkROWmJUVXdZMk52YUdkSVEybHJRVVJPT0V4RVFXbEJaRXRQUTBGVlZYZG5aMFpDVFVFMFJ3cEJNVlZrUkhkRlFpOTNVVVZCZDBsSVowUkJWRUpuVGxaSVUxVkZSRVJCUzBKblozSkNaMFZHUWxGalJFRjZRV1JDWjA1V1NGRTBSVVpuVVZWVlYzWllDa3c0TTNwUFJIWnBVV3hIY0hsVWRYWTJaakJXUTNSQmQwaDNXVVJXVWpCcVFrSm5kMFp2UVZVek9WQndlakZaYTBWYVlqVnhUbXB3UzBaWGFYaHBORmtLV2tRNGQwaDNXVVJXVWpCU1FWRklMMEpDVlhkRk5FVlNXVzVLY0ZsWE5VRmFSMVp2V1ZjeGJHTnBOV3BpTWpCM1RFRlpTMHQzV1VKQ1FVZEVkbnBCUWdwQlVWRmxZVWhTTUdOSVRUWk1lVGx1WVZoU2IyUlhTWFZaTWpsMFRESjRkbG95YkhWTU1qbG9aRmhTYjAxSlIwdENaMjl5UW1kRlJVRmtXalZCWjFGRENrSklkMFZsWjBJMFFVaFpRVE5VTUhkaGMySklSVlJLYWtkU05HTnRWMk16UVhGS1MxaHlhbVZRU3pNdmFEUndlV2RET0hBM2J6UkJRVUZIUlRaVVltWUtTMEZCUVVKQlRVRlNla0pHUVdsQ1VqVnphRTEwVVZKWVVsWXhWVzlCZDBsUVNVMTZSRVpxZVZKcVdGTldlaThyTjJ0eVUxZGhVbXhvUVVsb1FVbFFOZ3AyU25aaVJGUXdaa0ZOTUZCNlUyVkpSMXBxWjNoa0swOXBabmxVV21FMmFWaHRlV3BXVEU5SlRVRnZSME5EY1VkVFRUUTVRa0ZOUkVFeVkwRk5SMUZEQ2sxRWQxZHhha3B1UnpSVVMwNUZZMmR3UzNRMVUwZDNaRmhaWW1GVWQwRXplSEJSVWpSUUsySlJNVE5yWVRsUGVHbEhhM0JxV1ZwRlZWaHpWMGhqTTJFS2MyZEpkMUJyVWt0S2JHZFVha3RrYmxwVFZYRmxaV2hGVXpNMk1VdDNjMjB6Y2xVdmJqbDZla1ZLVTB0dVNuTlFaSFp0UVZSQmFYZHlRMHR3YzB0NWRnb3JWekZqQ2kwdExTMHRSVTVFSUVORlVsUkpSa2xEUVZSRkxTMHRMUzBLIiwic2lnIjoiVFVWVlEwbFJSSEZJZEhCcmF6RmtNSEpOUjB4dFpqTnhaW
FE1YWt4aGJHVXpTMVp1T0ZCdWVYZHdkM1EzYkc0ck9VRkpaMGM1UTBwMmRsVnRlV1Z0YUU1WlNIb3dSR1pLTkhaTlprdHJNVlJOWnl0dE0yaFNNRzFKVTFoS2IzTTkifV19LCJoYXNoIjp7ImFsZ29yaXRobSI6InNoYTI1NiIsInZhbHVlIjoiOTY4NGFhZjk0NTJjYmU1Mzg1NGVkODM2OWRmM2NiZDc2ZjM2YzI0YzI4YjE5MjlkN2JlMDExMzhjNmE0ZDAxZSJ9LCJwYXlsb2FkSGFzaCI6eyJhbGdvcml0aG0iOiJzaGEyNTYiLCJ2YWx1ZSI6ImM2OTQzNTJlMTkzYmM1OGVlNzZlZTc2NTQwNGRkMGQwN2IwNDM4ODVkMWQ5NzAyZTBlNGQxOGQxNzIzOWM1ZjMifX19fQ=="
+      }
+    ],
+    "timestampVerificationData": {
+      "rfc3161Timestamps": []
+    }
+  },
+  "verificationMaterial": {
+    "x509CertificateChain": {
+      "certificates": [
+        {
+          "rawBytes": "MIICnzCCAiagAwIBAgIURLDuAF2GTm00dzNAfjAcZo2iJwkwCgYIKoZIzj0EAwMwNzEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MR4wHAYDVQQDExVzaWdzdG9yZS1pbnRlcm1lZGlhdGUwHhcNMjIxMjA2MjA1MzE4WhcNMjIxMjA2MjEwMzE4WjAAMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEhEeE87Zc5nBjYPLwOS3f+NCaeEYOoVyQDhflVvI6fXBa6ap9ovedYHJ3zZZLT3Ym50ccohgHCikADN8LDAiAdKOCAUUwggFBMA4GA1UdDwEB/wQEAwIHgDATBgNVHSUEDDAKBggrBgEFBQcDAzAdBgNVHQ4EFgQUUWvXL83zODviQlGpyTuv6f0VCtAwHwYDVR0jBBgwFoAU39Ppz1YkEZb5qNjpKFWixi4YZD8wHwYDVR0RAQH/BBUwE4ERYnJpYW5AZGVoYW1lci5jb20wLAYKKwYBBAGDvzABAQQeaHR0cHM6Ly9naXRodWIuY29tL2xvZ2luL29hdXRoMIGKBgorBgEEAdZ5AgQCBHwEegB4AHYA3T0wasbHETJjGR4cmWc3AqJKXrjePK3/h4pygC8p7o4AAAGE6TbfKAAABAMARzBFAiBR5shMtQRXRV1UoAwIPIMzDFjyRjXSVz/+7krSWaRlhAIhAIP6vJvbDT0fAM0PzSeIGZjgxd+OifyTZa6iXmyjVLOIMAoGCCqGSM49BAMDA2cAMGQCMDwWqjJnG4TKNEcgpKt5SGwdXYbaTwA3xpQR4P+bQ13ka9OxiGkpjYZEUXsWHc3asgIwPkRKJlgTjKdnZSUqeehES361Kwsm3rU/n9zzEJSKnJsPdvmATAiwrCKpsKyv+W1c"
+        },
+        {
+          "rawBytes": "MIICGjCCAaGgAwIBAgIUALnViVfnU0brJasmRkHrn/UnfaQwCgYIKoZIzj0EAwMwKjEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MREwDwYDVQQDEwhzaWdzdG9yZTAeFw0yMjA0MTMyMDA2MTVaFw0zMTEwMDUxMzU2NThaMDcxFTATBgNVBAoTDHNpZ3N0b3JlLmRldjEeMBwGA1UEAxMVc2lnc3RvcmUtaW50ZXJtZWRpYXRlMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE8RVS/ysH+NOvuDZyPIZtilgUF9NlarYpAd9HP1vBBH1U5CV77LSS7s0ZiH4nE7Hv7ptS6LvvR/STk798LVgMzLlJ4HeIfF3tHSaexLcYpSASr1kS0N/RgBJz/9jWCiXno3sweTAOBgNVHQ8BAf8EBAMCAQYwEwYDVR0lBAwwCgYIKwYBBQUHAwMwEgYDVR0TAQH/BAgwBgEB/wIBADAdBgNVHQ4EFgQU39Ppz1YkEZb5qNjpKFWixi4YZD8wHwYDVR0jBBgwFoAUWMAeX5FFpWapesyQoZMi0CrFxfowCgYIKoZIzj0EAwMDZwAwZAIwPCsQK4DYiZYDPIaDi5HFKnfxXx6ASSVmERfsynYBiX2X6SJRnZU84/9DZdnFvvxmAjBOt6QpBlc4J/0DxvkTCqpclvziL6BCCPnjdlIB3Pu3BxsPmygUY7Ii2zbdCdliiow="
+        },
+        {
+          "rawBytes": "MIIB9zCCAXygAwIBAgIUALZNAPFdxHPwjeDloDwyYChAO/4wCgYIKoZIzj0EAwMwKjEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MREwDwYDVQQDEwhzaWdzdG9yZTAeFw0yMTEwMDcxMzU2NTlaFw0zMTEwMDUxMzU2NThaMCoxFTATBgNVBAoTDHNpZ3N0b3JlLmRldjERMA8GA1UEAxMIc2lnc3RvcmUwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAT7XeFT4rb3PQGwS4IajtLk3/OlnpgangaBclYpsYBr5i+4ynB07ceb3LP0OIOZdxexX69c5iVuyJRQ+Hz05yi+UF3uBWAlHpiS5sh0+H2GHE7SXrk1EC5m1Tr19L9gg92jYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRYwB5fkUWlZql6zJChkyLQKsXF+jAfBgNVHSMEGDAWgBRYwB5fkUWlZql6zJChkyLQKsXF+jAKBggqhkjOPQQDAwNpADBmAjEAj1nHeXZp+13NWBNa+EDsDP8G1WWg1tCMWP/WHPqpaVo0jhsweNFZgSs0eE7wYI4qAjEA2WB9ot98sIkoF3vZYdd3/VtWB5b9TNMea7Ix/stJ5TfcLLeABLE4BNJOsQ4vnBHJ"
+        }
+      ]
+    }
+  },
+  "dsseEnvelope": {
+    "payload": "ewogICJfdHlwZSI6ICJodHRwczovL2luLXRvdG8uaW8vU3RhdGVtZW50L3YwLjEiLAogICJzdWJqZWN0IjogWwogICAgewogICAgICAibmFtZSI6ICJwa2c6bnBtLyU0MG5wbWNsaS9saWJucG1wdWJsaXNoLXRlc3RAMS4wLjAiLAogICAgICAiZGlnZXN0IjogewogICAgICAgICJzaGE1MTIiOiAiYmE1ZDJlY2ZhNjY3MjRlMWZhZmU0NzM0ODkzMzk0NjRhODZlZjRiNWM2OTcxOGFkOTQ3MzFmMzExYjJkNjA1NzhhODMxMzRiMzM5M2FmOTgyOGFlZjgzZGM1ZWRiMTkxMjZhOGFlZGE1NTI1M2MzMWVjMTkwOWNiZmQ0OGY2NmYiCiAgICAgIH0KICAgIH0KICBdLAogICJwcmVkaWNhdGVUeXBlIjogImh0dHBzOi8vc2xzYS5kZXYvcHJvdmVuYW5jZS92MC4yIiwKICAicHJlZGljYXRlIjogewogICAgImJ1aWxkVHlwZSI6ICJodHRwczovL2dpdGh1Yi5jb20vbnBtL2NsaS9naGFAdjAiLAogICAgImJ1aWxkZXIiOiB7CiAgICAgICJpZCI6ICJodHRwczovL2dpdGh1Yi5jb20vbnBtL2NsaUA5LjEuMyIKICAgIH0sCiAgICAiaW52b2NhdGlvbiI6IHsKICAgICAgImNvbmZpZ1NvdXJjZSI6IHsKICAgICAgICAidXJpIjogImdpdCtodHRwczovL2dpdGh1Yi5jb20vbnBtL2NsaUByZWZzL2hlYWRzL2xhdGVzdCIsCiAgICAgICAgImRpZ2VzdCI6IHsKICAgICAgICAgICJzaGExIjogIjFjZjE3ODA0Yzc2MjVjNWM3OGVjOTY5NmY5ZmVmN2VjYzQ0ZTBjNTgiCiAgICAgICAgfSwKICAgICAgICAiZW50cnlQb2ludCI6ICJwdWJsaXNoIgogICAgICB9LAogICAgICAicGFyYW1ldGVycyI6IHt9LAogICAgICAiZW52aXJvbm1lbnQiOiB7CiAgICAgICAgIkdJVEhVQl9FVkVOVF9OQU1FIjogIndvcmtmbG93X2Rpc3BhdGNoIiwKICAgICAgICAiR0lUSFVCX0pPQiI6ICJwcm9kdWN0aW9uIiwKICAgICAgICAiR0lUSFVCX1JFRiI6ICJyZWZzL2hlYWRzL2xhdGVzdCIsCiAgICAgICAgIkdJVEhVQl9SRUZfVFlQRSI6ICJicmFuY2giLAogICAgICAgICJHSVRIVUJfUkVQT1NJVE9SWSI6ICJucG0vY2xpIiwKICAgICAgICAiR0lUSFVCX1JVTl9BVFRFTVBUIjogIjEiLAogICAgICAgICJHSVRIVUJfUlVOX0lEIjogIjM2MjIzMTQzMDEiLAogICAgICAgICJHSVRIVUJfUlVOX05VTUJFUiI6ICIyNSIsCiAgICAgICAgIkdJVEhVQl9TSEEiOiAiMWNmMTc4MDRjNzYyNWM1Yzc4ZWM5Njk2ZjlmZWY3ZWNjNDRlMGM1OCIsCiAgICAgICAgIkdJVEhVQl9XT1JLRkxPVyI6ICJwdWJsaXNoIiwKICAgICAgICAiSU1BR0VfT1MiOiAidWJ1bnR1MjIiLAogICAgICAgICJJTUFHRV9WRVJTSU9OIjogIjIwMjIxMTI3LjEiLAogICAgICAgICJSVU5ORVJfQVJDSCI6ICJYNjQiLAogICAgICAgICJSVU5ORVJfTkFNRSI6ICJIb3N0ZWQgQWdlbnQiLAogICAgICAgICJSVU5ORVJfT1MiOiAiTGludXgiCiAgICAgIH0KICAgIH0sCiAgICAibWV0YWRhdGEiOiB7CiAgICAgICJidWlsZEludm9jYXRpb25JZCI6ICIzNjIyMzE0MzAxLTEiLAogICAgICAiY29tcGxldGVuZXN
zIjogewogICAgICAgICJwYXJhbWV0ZXJzIjogZmFsc2UsCiAgICAgICAgImVudmlyb25tZW50IjogZmFsc2UsCiAgICAgICAgIm1hdGVyaWFscyI6IGZhbHNlCiAgICAgIH0sCiAgICAgICJyZXByb2R1Y2libGUiOiBmYWxzZQogICAgfSwKICAgICJtYXRlcmlhbHMiOiBbCiAgICAgIHsKICAgICAgICAidXJpIjogImdpdCtodHRwczovL2dpdGh1Yi5jb20vbnBtL2NsaSIsCiAgICAgICAgImRpZ2VzdCI6IHsKICAgICAgICAgICJzaGExIjogIjFjZjE3ODA0Yzc2MjVjNWM3OGVjOTY5NmY5ZmVmN2VjYzQ0ZTBjNTgiCiAgICAgICAgfQogICAgICB9CiAgICBdCiAgfQp9Cg==",
+    "payloadType": "application/vnd.in-toto+json",
+    "signatures": [
+      {
+        "sig": "MEUCIQDqHtpkk1d0rMGLmf3qet9jLale3KVn8Pnywpwt7ln+9AIgG9CJvvUmyemhNYHz0DfJ4vMfKk1TMg+m3hR0mISXJos=",
+        "keyid": ""
+      }
+    ]
+  }
+}
diff --git a/workspaces/libnpmpublish/test/publish.js b/workspaces/libnpmpublish/test/publish.js
index 0818c94fbe494..6daaeefc2c61b 100644
--- a/workspaces/libnpmpublish/test/publish.js
+++ b/workspaces/libnpmpublish/test/publish.js
@@ -8,6 +8,7 @@ const ssri = require('ssri')
 const t = require('tap')
 
 const MockRegistry = require('@npmcli/mock-registry')
+const mockGlobals = require('../../../test/fixtures/mock-globals.js')
 
 // TODO use registry.manifest (requires json date wrangling for nock)
 
@@ -596,3 +597,212 @@ t.test('other error code', async t => {
     'no retry on non-409'
   )
 })
+
+t.test('publish existing package with provenance in gha', async t => {
+  const oidcURL = 'https://mock.oidc'
+  const requestToken = 'decafbad'
+  // Set-up GHA environment variables
+  mockGlobals(t, {
+    'process.env': {
+      CI: true,
+      GITHUB_ACTIONS: true,
+      ACTIONS_ID_TOKEN_REQUEST_URL: oidcURL,
+      ACTIONS_ID_TOKEN_REQUEST_TOKEN: requestToken,
+    },
+  })
+  const { publish } = t.mock('..', { 'ci-info': t.mock('ci-info') })
+  const registry = new MockRegistry({
+    tap: t,
+    registry: opts.registry,
+    authorization: token,
+  })
+  const manifest = {
+    name: '@npmcli/libnpmpublish-test',
+    version: '1.0.0',
+    description: 'test libnpmpublish package',
+  }
+  const spec = npa(manifest.name)
+
+  // Data for mocking the OIDC token request
+  const oidcClaims = {
+    iss: 'https://oauth2.sigstore.dev/auth',
+    email: 'foo@bar.com',
+  }
+  const idToken = `.${Buffer.from(JSON.stringify(oidcClaims)).toString('base64')}.`
+
+  // Data for mocking Fulcio certifcate request
+  const fulcioURL = 'https://mock.fulcio'
+  const leafCertificate = `-----BEGIN CERTIFICATE-----\nabc\n-----END CERTIFICATE-----\n`
+  const rootCertificate = `-----BEGIN CERTIFICATE-----\nxyz\n-----END CERTIFICATE-----\n`
+  const certificate = [leafCertificate, rootCertificate].join()
+
+  // Data for mocking Rekor upload
+  const rekorURL = 'https://mock.rekor'
+  const signature = 'ABC123'
+  const b64Cert = Buffer.from(leafCertificate).toString('base64')
+  const uuid =
+    '69e5a0c1663ee4452674a5c9d5050d866c2ee31e2faaf79913aea7cc27293cf6'
+
+  const signatureBundle = {
+    kind: 'hashedrekord',
+    apiVersion: '0.0.1',
+    spec: {
+      signature: {
+        content: signature,
+        publicKey: { content: b64Cert },
+      },
+    },
+  }
+
+  const rekorEntry = {
+    [uuid]: {
+      body: Buffer.from(JSON.stringify(signatureBundle)).toString(
+        'base64'
+      ),
+      integratedTime: 1654015743,
+      logID:
+        'c0d23d6ad406973f9559f3ba2d1ca01f84147d8ffc5b8445c224f98b9591801d',
+      logIndex: 2513258,
+      verification: {
+        /* eslint-disable-next-line max-len */
+        signedEntryTimestamp: 'MEUCIQD6CD7ZNLUipFoxzmSL/L8Ewic4SRkXN77UjfJZ7d/wAAIgatokSuX9Rg0iWxAgSfHMtcsagtDCQalU5IvXdQ+yLEA=',
+      },
+    },
+  }
+
+  const packument = {
+    _id: manifest.name,
+    name: manifest.name,
+    description: manifest.description,
+    'dist-tags': {
+      latest: '1.0.0',
+    },
+    versions: {
+      '1.0.0': {
+        _id: `${manifest.name}@${manifest.version}`,
+        _nodeVersion: process.versions.node,
+        ...manifest,
+        dist: {
+          shasum,
+          integrity: integrity.sha512[0].toString(),
+          /* eslint-disable-next-line max-len */
+          tarball: 'http://mock.reg/@npmcli/libnpmpublish-test/-/@npmcli/libnpmpublish-test-1.0.0.tgz',
+        },
+      },
+    },
+    access: 'public',
+    _attachments: {
+      '@npmcli/libnpmpublish-test-1.0.0.tgz': {
+        content_type: 'application/octet-stream',
+        data: tarData.toString('base64'),
+        length: tarData.length,
+      },
+      '@npmcli/libnpmpublish-test-1.0.0.sigstore': {
+        // Can't match data against static value as signature is always
+        // different.
+        // Can't match length because in github actions certain environment
+        // variables are present that are not present when running locally,
+        // changing the payload size.
+        content_type: 'application/vnd.dev.sigstore.bundle+json;version=0.1',
+      },
+    },
+  }
+
+  const oidcSrv = MockRegistry.tnock(t, oidcURL)
+  oidcSrv.get('/?audience=sigstore', undefined, {
+    authorization: `Bearer ${requestToken}`,
+  }).reply(200, { value: idToken })
+
+  const fulcioSrv = MockRegistry.tnock(t, fulcioURL)
+  fulcioSrv.matchHeader('Accept', 'application/pem-certificate-chain')
+    .matchHeader('Content-Type', 'application/json')
+    .matchHeader('Authorization', `Bearer ${idToken}`)
+    .post('/api/v1/signingCert', {
+      publicKey: { content: /.+/i },
+      signedEmailAddress: /.+/i,
+    })
+    .reply(200, certificate)
+
+  const rekorSrv = MockRegistry.tnock(t, rekorURL)
+  rekorSrv
+    .matchHeader('Accept', 'application/json')
+    .matchHeader('Content-Type', 'application/json')
+    .post('/api/v1/log/entries')
+    .reply(201, rekorEntry)
+
+  registry.getVisibility({ spec, visibility: { public: true } })
+  registry.nock.put(`/${spec.escapedName}`, body => {
+    return t.match(body, packument, 'posted packument matches expectations')
+  }).reply(201, {})
+
+  const ret = await publish(manifest, tarData, {
+    ...opts,
+    provenance: true,
+    fulcioURL: fulcioURL,
+    rekorURL: rekorURL,
+  })
+  t.ok(ret, 'publish succeeded')
+})
+
+t.test('publish new/private package with provenance in gha - no access', async t => {
+  const oidcURL = 'https://mock.oidc'
+  const requestToken = 'decafbad'
+  mockGlobals(t, {
+    'process.env': {
+      CI: true,
+      GITHUB_ACTIONS: true,
+      ACTIONS_ID_TOKEN_REQUEST_URL: oidcURL,
+      ACTIONS_ID_TOKEN_REQUEST_TOKEN: requestToken,
+    },
+  })
+  const { publish } = t.mock('..', { 'ci-info': t.mock('ci-info') })
+  const registry = new MockRegistry({
+    tap: t,
+    registry: opts.registry,
+    authorization: token,
+    strict: true,
+  })
+  const manifest = {
+    name: '@npmcli/libnpmpublish-test',
+    version: '1.0.0',
+    description: 'test libnpmpublish package',
+  }
+  const spec = npa(manifest.name)
+  registry.getVisibility({ spec, visibility: { public: false } })
+
+  await t.rejects(
+    publish(manifest, Buffer.from(''), {
+      ...opts,
+      access: null,
+      provenance: true,
+    }),
+    { code: 'EUSAGE' }
+  )
+})
+
+t.test('automatic provenance in unsupported environment', async t => {
+  mockGlobals(t, {
+    'process.env': {
+      CI: false,
+      GITHUB_ACTIONS: false,
+    },
+  })
+  const { publish } = t.mock('..', { 'ci-info': t.mock('ci-info') })
+  const manifest = {
+    name: '@npmcli/libnpmpublish-test',
+    version: '1.0.0',
+    description: 'test libnpmpublish package',
+  }
+
+  await t.rejects(
+    publish(manifest, Buffer.from(''), {
+      ...opts,
+      access: null,
+      provenance: true,
+    }),
+    {
+      message: /not supported/,
+      code: 'EUSAGE',
+    }
+  )
+})

From cc61923daf8a88942d9ccd22bf58bdd978321565 Mon Sep 17 00:00:00 2001
From: Brian DeHamer <bdehamer@github.com>
Date: Thu, 9 Feb 2023 12:05:06 -0800
Subject: [PATCH 2/2] fix: refactor error reporting in audit command

Primary work authored by [@wraithgar](https://github.com/wraithgar).
---
 lib/commands/audit.js                         | 130 ++++++++----------
 .../test/lib/commands/audit.js.test.cjs       |  31 +++--
 test/lib/commands/audit.js                    |  20 +--
 3 files changed, 89 insertions(+), 92 deletions(-)

diff --git a/lib/commands/audit.js b/lib/commands/audit.js
index 13886ea6350b6..192b3b9663d6c 100644
--- a/lib/commands/audit.js
+++ b/lib/commands/audit.js
@@ -25,7 +25,6 @@ class VerifySignatures {
     this.checkedPackages = new Set()
     this.auditedWithKeysCount = 0
     this.verifiedCount = 0
-    this.output = []
     this.exitCode = 0
   }
 
@@ -60,13 +59,13 @@ class VerifySignatures {
     const hasNoInvalidOrMissing = invalid.length === 0 && missing.length === 0
 
     if (!hasNoInvalidOrMissing) {
-      this.exitCode = 1
+      process.exitCode = 1
     }
 
     if (this.npm.config.get('json')) {
-      this.appendOutput(JSON.stringify({
-        invalid: this.makeJSON(invalid),
-        missing: this.makeJSON(missing),
+      this.npm.output(JSON.stringify({
+        invalid,
+        missing,
       }, null, 2))
       return
     }
@@ -76,54 +75,65 @@ class VerifySignatures {
     const auditedPlural = this.auditedWithKeysCount > 1 ? 's' : ''
     const timing = `audited ${this.auditedWithKeysCount} package${auditedPlural} in ` +
       `${Math.floor(Number(elapsed) / 1e9)}s`
-    this.appendOutput(`${timing}\n`)
+    this.npm.output(timing)
+    this.npm.output('')
 
     if (this.verifiedCount) {
       const verifiedBold = this.npm.chalk.bold('verified')
-      const msg = this.verifiedCount === 1 ?
-        `${this.verifiedCount} package has a ${verifiedBold} registry signature\n` :
-        `${this.verifiedCount} packages have ${verifiedBold} registry signatures\n`
-      this.appendOutput(msg)
+      if (this.verifiedCount === 1) {
+        this.npm.output(`${this.verifiedCount} package has a ${verifiedBold} registry signature`)
+      } else {
+        this.npm.output(`${this.verifiedCount} packages have ${verifiedBold} registry signatures`)
+      }
+      this.npm.output('')
     }
 
     if (missing.length) {
       const missingClr = this.npm.chalk.bold(this.npm.chalk.red('missing'))
-      const msg = missing.length === 1 ?
-        `package has a ${missingClr} registry signature` :
-        `packages have ${missingClr} registry signatures`
-      this.appendOutput(
-        `${missing.length} ${msg} but the registry is ` +
-        `providing signing keys:\n`
+      if (missing.length === 1) {
+        /* eslint-disable-next-line max-len */
+        this.npm.output(`1 package has a ${missingClr} registry signature but the registry is providing signing keys:`)
+      } else {
+        /* eslint-disable-next-line max-len */
+        this.npm.output(`${missing.length} packages have ${missingClr} registry signatures but the registry is providing signing keys:`)
+      }
+      this.npm.output('')
+      missing.map(m =>
+        this.npm.output(`${this.npm.chalk.red(`${m.name}@${m.version}`)} (${m.registry})`)
       )
-      this.appendOutput(this.humanOutput(missing))
     }
 
     if (invalid.length) {
+      if (missing.length) {
+        this.npm.output('')
+      }
       const invalidClr = this.npm.chalk.bold(this.npm.chalk.red('invalid'))
-      const msg = invalid.length === 1 ?
-        `${invalid.length} package has an ${invalidClr} registry signature:\n` :
-        `${invalid.length} packages have ${invalidClr} registry signatures:\n`
-      this.appendOutput(
-        `${missing.length ? '\n' : ''}${msg}`
+      // We can have either invalid signatures or invalid provenance
+      const invalidSignatures = this.invalid.filter(i => i.code === 'EINTEGRITYSIGNATURE')
+      if (invalidSignatures.length === 1) {
+        this.npm.output(`1 package has an ${invalidClr} registry signature:`)
+      // } else if (invalidSignatures.length > 1) {
+      } else {
+        // TODO move this back to an else if once provenance attestation audit is added
+        /* eslint-disable-next-line max-len */
+        this.npm.output(`${invalidSignatures.length} packages have ${invalidClr} registry signatures:`)
+      }
+      this.npm.output('')
+      invalidSignatures.map(i =>
+        this.npm.output(`${this.npm.chalk.red(`${i.name}@${i.version}`)} (${i.registry})`)
       )
-      this.appendOutput(this.humanOutput(invalid))
-      const tamperMsg = invalid.length === 1 ?
-        `\nSomeone might have tampered with this package since it was ` +
-        `published on the registry!\n` :
-        `\nSomeone might have tampered with these packages since they where ` +
-        `published on the registry!\n`
-      this.appendOutput(tamperMsg)
+      this.npm.output('')
+      if (invalid.length === 1) {
+        /* eslint-disable-next-line max-len */
+        this.npm.output(`Someone might have tampered with this package since it was published on the registry!`)
+      } else {
+        /* eslint-disable-next-line max-len */
+        this.npm.output(`Someone might have tampered with these packages since they were published on the registry!`)
+      }
+      this.npm.output('')
     }
   }
 
-  appendOutput (...args) {
-    this.output.push(...args.flat())
-  }
-
-  report () {
-    return { report: this.output.join('\n'), exitCode: this.exitCode }
-  }
-
   getEdgesOut (nodes, filterSet) {
     const edges = new Set()
     const registries = new Set()
@@ -249,11 +259,12 @@ class VerifySignatures {
       ...this.npm.flatOptions,
     })
     const signatures = _signatures || []
-    return {
+    const result = {
       integrity,
       signatures,
       resolved,
     }
+    return result
   }
 
   async getVerifiedInfo (edge) {
@@ -286,51 +297,33 @@ class VerifySignatures {
         this.verifiedCount += 1
       } else if (keys.length) {
         this.missing.push({
-          name,
-          version,
-          location,
-          resolved,
           integrity,
+          location,
+          name,
           registry,
+          resolved,
+          version,
         })
       }
     } catch (e) {
       if (e.code === 'EINTEGRITYSIGNATURE') {
-        const { signature, keyid, integrity, resolved } = e
         this.invalid.push({
+          code: e.code,
+          integrity: e.integrity,
+          keyid: e.keyid,
+          location,
           name,
+          registry,
+          resolved: e.resolved,
+          signature: e.signature,
           type,
           version,
-          resolved,
-          location,
-          integrity,
-          registry,
-          signature,
-          keyid,
         })
       } else {
         throw e
       }
     }
   }
-
-  humanOutput (list) {
-    return list.map(v =>
-      `${this.npm.chalk.red(`${v.name}@${v.version}`)} (${v.registry})`
-    ).join('\n')
-  }
-
-  makeJSON (deps) {
-    return deps.map(d => ({
-      name: d.name,
-      version: d.version,
-      location: d.location,
-      resolved: d.resolved,
-      integrity: d.integrity,
-      signature: d.signature,
-      keyid: d.keyid,
-    }))
-  }
 }
 
 class Audit extends ArboristWorkspaceCmd {
@@ -432,9 +425,6 @@ class Audit extends ArboristWorkspaceCmd {
 
     const verify = new VerifySignatures(tree, filterSet, this.npm, { ...opts })
     await verify.run()
-    const result = verify.report()
-    process.exitCode = process.exitCode || result.exitCode
-    this.npm.output(result.report)
   }
 }
 
diff --git a/tap-snapshots/test/lib/commands/audit.js.test.cjs b/tap-snapshots/test/lib/commands/audit.js.test.cjs
index 9262e0b51aa2d..c95e30b26783d 100644
--- a/tap-snapshots/test/lib/commands/audit.js.test.cjs
+++ b/tap-snapshots/test/lib/commands/audit.js.test.cjs
@@ -52,21 +52,25 @@ exports[`test/lib/commands/audit.js TAP audit signatures json output with invali
 {
   "invalid": [
     {
-      "name": "kms-demo",
-      "version": "1.0.0",
+      "code": "EINTEGRITYSIGNATURE",
+      "integrity": "sha512-QqZ7VJ/8xPkS9s2IWB7Shj3qTJdcRyeXKbPQnsZjsPEwvutGv0EGeVchPcauoiDFJlGbZMFq5GDCurAGNSghJQ==",
+      "keyid": "SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA",
       "location": "node_modules/kms-demo",
+      "name": "kms-demo",
+      "registry": "https://registry.npmjs.org/",
       "resolved": "https://registry.npmjs.org/kms-demo/-/kms-demo-1.0.0.tgz",
-      "integrity": "sha512-QqZ7VJ/8xPkS9s2IWB7Shj3qTJdcRyeXKbPQnsZjsPEwvutGv0EGeVchPcauoiDFJlGbZMFq5GDCurAGNSghJQ==",
       "signature": "bogus",
-      "keyid": "SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA"
+      "type": "dependencies",
+      "version": "1.0.0"
     }
   ],
   "missing": [
     {
-      "name": "async",
-      "version": "1.1.1",
       "location": "node_modules/async",
-      "resolved": "https://registry.npmjs.org/async/-/async-1.1.1.tgz"
+      "name": "async",
+      "registry": "https://registry.npmjs.org/",
+      "resolved": "https://registry.npmjs.org/async/-/async-1.1.1.tgz",
+      "version": "1.1.1"
     }
   ]
 }
@@ -76,13 +80,16 @@ exports[`test/lib/commands/audit.js TAP audit signatures json output with invali
 {
   "invalid": [
     {
-      "name": "kms-demo",
-      "version": "1.0.0",
+      "code": "EINTEGRITYSIGNATURE",
+      "integrity": "sha512-QqZ7VJ/8xPkS9s2IWB7Shj3qTJdcRyeXKbPQnsZjsPEwvutGv0EGeVchPcauoiDFJlGbZMFq5GDCurAGNSghJQ==",
+      "keyid": "SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA",
       "location": "node_modules/kms-demo",
+      "name": "kms-demo",
+      "registry": "https://registry.npmjs.org/",
       "resolved": "https://registry.npmjs.org/kms-demo/-/kms-demo-1.0.0.tgz",
-      "integrity": "sha512-QqZ7VJ/8xPkS9s2IWB7Shj3qTJdcRyeXKbPQnsZjsPEwvutGv0EGeVchPcauoiDFJlGbZMFq5GDCurAGNSghJQ==",
       "signature": "bogus",
-      "keyid": "SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA"
+      "type": "dependencies",
+      "version": "1.0.0"
     }
   ],
   "missing": []
@@ -204,7 +211,7 @@ audited 2 packages in xxx
 async@1.1.1 (https://registry.npmjs.org/)
 kms-demo@1.0.0 (https://registry.npmjs.org/)
 
-Someone might have tampered with these packages since they where published on the registry!
+Someone might have tampered with these packages since they were published on the registry!
 
 `
 
diff --git a/test/lib/commands/audit.js b/test/lib/commands/audit.js
index bba74407cb3fe..fdb53c6aa4f57 100644
--- a/test/lib/commands/audit.js
+++ b/test/lib/commands/audit.js
@@ -758,7 +758,7 @@ t.test('audit signatures', async t => {
 
     await npm.exec('audit', ['signatures'])
 
-    t.equal(process.exitCode, 0, 'should exit successfully')
+    t.notOk(process.exitCode, 'should exit successfully')
     t.match(joinedOutput(), /audited 1 package/)
     t.matchSnapshot(joinedOutput())
   })
@@ -791,7 +791,7 @@ t.test('audit signatures', async t => {
 
     await npm.exec('audit', ['signatures'])
 
-    t.equal(process.exitCode, 0, 'should exit successfully')
+    t.notOk(process.exitCode, 'should exit successfully')
     t.match(joinedOutput(), /audited 1 package/)
     t.matchSnapshot(joinedOutput())
   })
@@ -914,7 +914,7 @@ t.test('audit signatures', async t => {
 
     await npm.exec('audit', ['signatures'])
 
-    t.equal(process.exitCode, 0, 'should exit successfully')
+    t.notOk(process.exitCode, 'should exit successfully')
     t.match(joinedOutput(), /audited 1 package/)
     t.matchSnapshot(joinedOutput())
   })
@@ -1095,7 +1095,7 @@ t.test('audit signatures', async t => {
 
     await npm.exec('audit', ['signatures'])
 
-    t.equal(process.exitCode, 0, 'should exit successfully')
+    t.notOk(process.exitCode, 'should exit successfully')
     t.match(joinedOutput(), JSON.stringify({ invalid: [], missing: [] }, null, 2))
     t.matchSnapshot(joinedOutput())
   })
@@ -1148,7 +1148,7 @@ t.test('audit signatures', async t => {
 
     await npm.exec('audit', ['signatures'])
 
-    t.equal(process.exitCode, 0, 'should exit successfully')
+    t.notOk(process.exitCode, 'should exit successfully')
     t.match(joinedOutput(), /audited 1 package/)
     t.matchSnapshot(joinedOutput())
   })
@@ -1257,7 +1257,7 @@ t.test('audit signatures', async t => {
 
     await npm.exec('audit', ['signatures'])
 
-    t.equal(process.exitCode, 0, 'should exit successfully')
+    t.notOk(process.exitCode, 'should exit successfully')
     t.match(joinedOutput(), /audited 1 package/)
     t.matchSnapshot(joinedOutput())
   })
@@ -1401,7 +1401,7 @@ t.test('audit signatures', async t => {
 
     await npm.exec('audit', ['signatures'])
 
-    t.equal(process.exitCode, 0, 'should exit successfully')
+    t.notOk(process.exitCode, 'should exit successfully')
     t.match(joinedOutput(), /audited 2 packages/)
     t.matchSnapshot(joinedOutput())
   })
@@ -1447,7 +1447,7 @@ t.test('audit signatures', async t => {
 
     await npm.exec('audit', ['signatures'])
 
-    t.equal(process.exitCode, 0, 'should exit successfully')
+    t.notOk(process.exitCode, 'should exit successfully')
     t.match(joinedOutput(), /audited 1 package/)
     t.matchSnapshot(joinedOutput())
   })
@@ -1625,7 +1625,7 @@ t.test('audit signatures', async t => {
 
       await npm.exec('audit', ['signatures'])
 
-      t.equal(process.exitCode, 0, 'should exit successfully')
+      t.notOk(process.exitCode, 'should exit successfully')
       t.match(joinedOutput(), /audited 3 packages/)
       t.matchSnapshot(joinedOutput())
     })
@@ -1678,7 +1678,7 @@ t.test('audit signatures', async t => {
 
       await npm.exec('audit', ['signatures'])
 
-      t.equal(process.exitCode, 0, 'should exit successfully')
+      t.notOk(process.exitCode, 'should exit successfully')
       t.match(joinedOutput(), /audited 2 packages/)
       t.matchSnapshot(joinedOutput())
     })