diff --git a/DEPENDENCIES.md b/DEPENDENCIES.md
index 324dbb190ca34..7ff1866331389 100644
--- a/DEPENDENCIES.md
+++ b/DEPENDENCIES.md
@@ -565,7 +565,7 @@ graph LR;
   npm-->remark-github;
   npm-->remark;
   npm-->semver;
-  npm-->sigstore;
+  npm-->sigstore-tuf["@sigstore/tuf"];
   npm-->spawk;
   npm-->ssri;
   npm-->supports-color;
@@ -768,8 +768,14 @@ graph LR;
   semver-->lru-cache;
   shebang-command-->shebang-regex;
   sigstore-->make-fetch-happen;
+  sigstore-->sigstore-bundle["@sigstore/bundle"];
   sigstore-->sigstore-protobuf-specs["@sigstore/protobuf-specs"];
+  sigstore-->sigstore-sign["@sigstore/sign"];
   sigstore-->sigstore-tuf["@sigstore/tuf"];
+  sigstore-bundle-->sigstore-protobuf-specs["@sigstore/protobuf-specs"];
+  sigstore-sign-->make-fetch-happen;
+  sigstore-sign-->sigstore-bundle["@sigstore/bundle"];
+  sigstore-sign-->sigstore-protobuf-specs["@sigstore/protobuf-specs"];
   sigstore-tuf-->sigstore-protobuf-specs["@sigstore/protobuf-specs"];
   sigstore-tuf-->tuf-js;
   socks-->ip;
diff --git a/lib/commands/audit.js b/lib/commands/audit.js
index 500620f2cd01b..de5483109d598 100644
--- a/lib/commands/audit.js
+++ b/lib/commands/audit.js
@@ -4,7 +4,7 @@ const localeCompare = require('@isaacs/string-locale-compare')('en')
 const npa = require('npm-package-arg')
 const pacote = require('pacote')
 const pMap = require('p-map')
-const { sigstore } = require('sigstore')
+const tufClient = require('@sigstore/tuf')
 
 const ArboristWorkspaceCmd = require('../arborist-cmd.js')
 const auditError = require('../utils/audit-error.js')
@@ -38,8 +38,8 @@ class VerifySignatures {
       throw new Error('found no installed dependencies to audit')
     }
 
-    const tuf = await sigstore.tuf.client({
-      tufCachePath: this.opts.tufCache,
+    const tuf = await tufClient.initTUF({
+      cachePath: this.opts.tufCache,
       retry: this.opts.retry,
       timeout: this.opts.timeout,
     })
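For reference, a minimal sketch of how the new `@sigstore/tuf` entry point replaces the old `sigstore.tuf` namespace in npm's signature-verification flow. The `getTarget()` call and the `registry.npmjs.org/keys.json` target name are illustrative assumptions about how the client is consumed, not part of this diff:

// Sketch only: fetch the npm registry's public signing keys via TUF.
const tufClient = require('@sigstore/tuf')

async function fetchRegistryKeys (opts) {
  // initTUF() replaces the old sigstore.tuf.client(); note the option
  // rename in the hunk above: tufCachePath -> cachePath.
  const tuf = await tufClient.initTUF({
    cachePath: opts.tufCache,
    retry: opts.retry,
    timeout: opts.timeout,
  })
  // Targets are fetched by name from the TUF repository; this target name
  // is assumed for illustration.
  const targetContent = await tuf.getTarget('registry.npmjs.org/keys.json')
  return JSON.parse(targetContent).keys
}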
diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index 56c6317d7d0e4..6073ab13442a5 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -36,8 +36,25 @@
 !/@pkgjs/parseargs
 !/@sigstore/
 /@sigstore/*
+!/@sigstore/bundle
+!/@sigstore/bundle/node_modules/
+/@sigstore/bundle/node_modules/*
+!/@sigstore/bundle/node_modules/@sigstore/
+/@sigstore/bundle/node_modules/@sigstore/*
+!/@sigstore/bundle/node_modules/@sigstore/protobuf-specs
 !/@sigstore/protobuf-specs
+!/@sigstore/sign
+!/@sigstore/sign/node_modules/
+/@sigstore/sign/node_modules/*
+!/@sigstore/sign/node_modules/@sigstore/
+/@sigstore/sign/node_modules/@sigstore/*
+!/@sigstore/sign/node_modules/@sigstore/protobuf-specs
 !/@sigstore/tuf
+!/@sigstore/tuf/node_modules/
+/@sigstore/tuf/node_modules/*
+!/@sigstore/tuf/node_modules/@sigstore/
+/@sigstore/tuf/node_modules/@sigstore/*
+!/@sigstore/tuf/node_modules/@sigstore/protobuf-specs
 !/@tootallnate/
 /@tootallnate/*
 !/@tootallnate/once
@@ -247,6 +264,18 @@
 !/sigstore
 !/sigstore/node_modules/
 /sigstore/node_modules/*
+!/sigstore/node_modules/@sigstore/
+/sigstore/node_modules/@sigstore/*
+!/sigstore/node_modules/@sigstore/tuf
+!/sigstore/node_modules/@sigstore/tuf/node_modules/
+/sigstore/node_modules/@sigstore/tuf/node_modules/*
+!/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/
+/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/*
+!/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs
+!/sigstore/node_modules/@tufjs/
+/sigstore/node_modules/@tufjs/*
+!/sigstore/node_modules/@tufjs/canonical-json
+!/sigstore/node_modules/@tufjs/models
 !/sigstore/node_modules/cacache
 !/sigstore/node_modules/cacache/node_modules/
 /sigstore/node_modules/cacache/node_modules/*
@@ -254,6 +283,7 @@
 !/sigstore/node_modules/lru-cache
 !/sigstore/node_modules/make-fetch-happen
 !/sigstore/node_modules/minipass
+!/sigstore/node_modules/tuf-js
 !/smart-buffer
 !/socks-proxy-agent
 !/socks
@@ -280,15 +310,6 @@
 !/tiny-relative-date
 !/treeverse
 !/tuf-js
-!/tuf-js/node_modules/
-/tuf-js/node_modules/*
-!/tuf-js/node_modules/cacache
-!/tuf-js/node_modules/cacache/node_modules/
-/tuf-js/node_modules/cacache/node_modules/*
-!/tuf-js/node_modules/cacache/node_modules/minipass
-!/tuf-js/node_modules/lru-cache
-!/tuf-js/node_modules/make-fetch-happen
-!/tuf-js/node_modules/minipass
 !/unique-filename
 !/unique-slug
 !/util-deprecate
diff --git a/node_modules/@sigstore/bundle/LICENSE b/node_modules/@sigstore/bundle/LICENSE
new file mode 100644
index 0000000000000..e9e7c1679a09d
--- /dev/null
+++ b/node_modules/@sigstore/bundle/LICENSE
@@ -0,0 +1,202 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!) The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright 2023 The Sigstore Authors
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/node_modules/@sigstore/bundle/dist/build.js b/node_modules/@sigstore/bundle/dist/build.js
new file mode 100644
index 0000000000000..0ccea62eaba87
--- /dev/null
+++ b/node_modules/@sigstore/bundle/dist/build.js
@@ -0,0 +1,89 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.toDSSEBundle = exports.toMessageSignatureBundle = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const protobuf_specs_1 = require("@sigstore/protobuf-specs");
+const bundle_1 = require("./bundle");
+// Message signature bundle - $case: 'messageSignature'
+function toMessageSignatureBundle(options) {
+    return {
+        mediaType: bundle_1.BUNDLE_V01_MEDIA_TYPE,
+        content: {
+            $case: 'messageSignature',
+            messageSignature: {
+                messageDigest: {
+                    algorithm: protobuf_specs_1.HashAlgorithm.SHA2_256,
+                    digest: options.digest,
+                },
+                signature: options.signature,
+            },
+        },
+        verificationMaterial: toVerificationMaterial(options),
+    };
+}
+exports.toMessageSignatureBundle = toMessageSignatureBundle;
+// DSSE envelope bundle - $case: 'dsseEnvelope'
+function toDSSEBundle(options) {
+    return {
+        mediaType: bundle_1.BUNDLE_V01_MEDIA_TYPE,
+        content: {
+            $case: 'dsseEnvelope',
+            dsseEnvelope: toEnvelope(options),
+        },
+        verificationMaterial: toVerificationMaterial(options),
+    };
+}
+exports.toDSSEBundle = toDSSEBundle;
+function toEnvelope(options) {
+    return {
+        payloadType: options.artifactType,
+        payload: options.artifact,
+        signatures: [toSignature(options)],
+    };
+}
+function toSignature(options) {
+    return {
+        keyid: options.keyHint || '',
+        sig: options.signature,
+    };
+}
+// Verification material
+function toVerificationMaterial(options) {
+    return {
+        content: toKeyContent(options),
+        tlogEntries: [],
+        timestampVerificationData: { rfc3161Timestamps: [] },
+    };
+}
+function toKeyContent(options) {
+    if (options.certificate) {
+        return {
+            $case: 'x509CertificateChain',
+            x509CertificateChain: {
+                certificates: [{ rawBytes: options.certificate }],
+            },
+        };
+    }
+    else {
+        return {
+            $case: 'publicKey',
+            publicKey: {
+                hint: options.keyHint || '',
+            },
+        };
+    }
+}
diff --git a/node_modules/@sigstore/bundle/dist/bundle.js b/node_modules/@sigstore/bundle/dist/bundle.js
new file mode 100644
index 0000000000000..8c01e2d19c5ec
--- /dev/null
+++ b/node_modules/@sigstore/bundle/dist/bundle.js
@@ -0,0 +1,22 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.isBundleWithDsseEnvelope = exports.isBundleWithMessageSignature = exports.isBundleWithPublicKey = exports.isBundleWithCertificateChain = exports.BUNDLE_V02_MEDIA_TYPE = exports.BUNDLE_V01_MEDIA_TYPE = void 0;
+exports.BUNDLE_V01_MEDIA_TYPE = 'application/vnd.dev.sigstore.bundle+json;version=0.1';
+exports.BUNDLE_V02_MEDIA_TYPE = 'application/vnd.dev.sigstore.bundle+json;version=0.2';
+// Type guards for bundle variants.
+function isBundleWithCertificateChain(b) {
+    return b.verificationMaterial.content.$case === 'x509CertificateChain';
+}
+exports.isBundleWithCertificateChain = isBundleWithCertificateChain;
+function isBundleWithPublicKey(b) {
+    return b.verificationMaterial.content.$case === 'publicKey';
+}
+exports.isBundleWithPublicKey = isBundleWithPublicKey;
+function isBundleWithMessageSignature(b) {
+    return b.content.$case === 'messageSignature';
+}
+exports.isBundleWithMessageSignature = isBundleWithMessageSignature;
+function isBundleWithDsseEnvelope(b) {
+    return b.content.$case === 'dsseEnvelope';
+}
+exports.isBundleWithDsseEnvelope = isBundleWithDsseEnvelope;
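The two modules above are the core of the new package: build.js assembles protobuf-shaped bundles, and bundle.js discriminates their oneof variants via the `$case` tag. A minimal sketch of how they compose, using placeholder digest/signature values (assumed usage, not part of this diff):

// Sketch only: build a v0.1 message-signature bundle and narrow its variants.
const crypto = require('crypto')
const {
  toMessageSignatureBundle,
  isBundleWithMessageSignature,
  isBundleWithPublicKey,
} = require('@sigstore/bundle')

const artifact = Buffer.from('hello, world')
const bundle = toMessageSignatureBundle({
  digest: crypto.createHash('sha256').update(artifact).digest(),
  signature: Buffer.from('placeholder-signature-bytes'),
  keyHint: 'my-key-id', // no certificate, so the key content is a publicKey hint
})

isBundleWithMessageSignature(bundle) // true -- content.$case === 'messageSignature'
isBundleWithPublicKey(bundle)        // true -- no certificate was supplied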
diff --git a/node_modules/@sigstore/bundle/dist/error.js b/node_modules/@sigstore/bundle/dist/error.js
new file mode 100644
index 0000000000000..f84295323b812
--- /dev/null
+++ b/node_modules/@sigstore/bundle/dist/error.js
@@ -0,0 +1,25 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ValidationError = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+class ValidationError extends Error {
+    constructor(message, fields) {
+        super(message);
+        this.fields = fields;
+    }
+}
+exports.ValidationError = ValidationError;
diff --git a/node_modules/@sigstore/bundle/dist/index.js b/node_modules/@sigstore/bundle/dist/index.js
new file mode 100644
index 0000000000000..b016a16d11cc0
--- /dev/null
+++ b/node_modules/@sigstore/bundle/dist/index.js
@@ -0,0 +1,40 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.isBundleV01 = exports.assertBundleV01 = exports.assertBundleLatest = exports.assertBundle = exports.envelopeToJSON = exports.envelopeFromJSON = exports.bundleToJSON = exports.bundleFromJSON = exports.ValidationError = exports.isBundleWithPublicKey = exports.isBundleWithMessageSignature = exports.isBundleWithDsseEnvelope = exports.isBundleWithCertificateChain = exports.BUNDLE_V02_MEDIA_TYPE = exports.BUNDLE_V01_MEDIA_TYPE = exports.toMessageSignatureBundle = exports.toDSSEBundle = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+var build_1 = require("./build");
+Object.defineProperty(exports, "toDSSEBundle", { enumerable: true, get: function () { return build_1.toDSSEBundle; } });
+Object.defineProperty(exports, "toMessageSignatureBundle", { enumerable: true, get: function () { return build_1.toMessageSignatureBundle; } });
+var bundle_1 = require("./bundle");
+Object.defineProperty(exports, "BUNDLE_V01_MEDIA_TYPE", { enumerable: true, get: function () { return bundle_1.BUNDLE_V01_MEDIA_TYPE; } });
+Object.defineProperty(exports, "BUNDLE_V02_MEDIA_TYPE", { enumerable: true, get: function () { return bundle_1.BUNDLE_V02_MEDIA_TYPE; } });
+Object.defineProperty(exports, "isBundleWithCertificateChain", { enumerable: true, get: function () { return bundle_1.isBundleWithCertificateChain; } });
+Object.defineProperty(exports, "isBundleWithDsseEnvelope", { enumerable: true, get: function () { return bundle_1.isBundleWithDsseEnvelope; } });
+Object.defineProperty(exports, "isBundleWithMessageSignature", { enumerable: true, get: function () { return bundle_1.isBundleWithMessageSignature; } });
+Object.defineProperty(exports, "isBundleWithPublicKey", { enumerable: true, get: function () { return bundle_1.isBundleWithPublicKey; } });
+var error_1 = require("./error");
+Object.defineProperty(exports, "ValidationError", { enumerable: true, get: function () { return error_1.ValidationError; } });
+var serialized_1 = require("./serialized");
+Object.defineProperty(exports, "bundleFromJSON", { enumerable: true, get: function () { return serialized_1.bundleFromJSON; } });
+Object.defineProperty(exports, "bundleToJSON", { enumerable: true, get: function () { return serialized_1.bundleToJSON; } });
+Object.defineProperty(exports, "envelopeFromJSON", { enumerable: true, get: function () { return serialized_1.envelopeFromJSON; } });
+Object.defineProperty(exports, "envelopeToJSON", { enumerable: true, get: function () { return serialized_1.envelopeToJSON; } });
+var validate_1 = require("./validate");
+Object.defineProperty(exports, "assertBundle", { enumerable: true, get: function () { return validate_1.assertBundle; } });
+Object.defineProperty(exports, "assertBundleLatest", { enumerable: true, get: function () { return validate_1.assertBundleLatest; } });
+Object.defineProperty(exports, "assertBundleV01", { enumerable: true, get: function () { return validate_1.assertBundleV01; } });
+Object.defineProperty(exports, "isBundleV01", { enumerable: true, get: function () { return validate_1.isBundleV01; } });
diff --git a/node_modules/@sigstore/bundle/dist/serialized.js b/node_modules/@sigstore/bundle/dist/serialized.js
new file mode 100644
index 0000000000000..f1073358cacfd
--- /dev/null
+++ b/node_modules/@sigstore/bundle/dist/serialized.js
@@ -0,0 +1,38 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.envelopeToJSON = exports.envelopeFromJSON = exports.bundleToJSON = exports.bundleFromJSON = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const protobuf_specs_1 = require("@sigstore/protobuf-specs");
+const validate_1 = require("./validate");
+const bundleFromJSON = (obj) => {
+    const bundle = protobuf_specs_1.Bundle.fromJSON(obj);
+    (0, validate_1.assertBundle)(bundle);
+    return bundle;
+};
+exports.bundleFromJSON = bundleFromJSON;
+const bundleToJSON = (bundle) => {
+    return protobuf_specs_1.Bundle.toJSON(bundle);
+};
+exports.bundleToJSON = bundleToJSON;
+const envelopeFromJSON = (obj) => {
+    return protobuf_specs_1.Envelope.fromJSON(obj);
+};
+exports.envelopeFromJSON = envelopeFromJSON;
+const envelopeToJSON = (envelope) => {
+    return protobuf_specs_1.Envelope.toJSON(envelope);
+};
+exports.envelopeToJSON = envelopeToJSON;
diff --git a/node_modules/@sigstore/bundle/dist/utility.js b/node_modules/@sigstore/bundle/dist/utility.js
new file mode 100644
index 0000000000000..c8ad2e549bdc6
--- /dev/null
+++ b/node_modules/@sigstore/bundle/dist/utility.js
@@ -0,0 +1,2 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
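serialized.js is the JSON boundary for these types; note that `bundleFromJSON()` validates on the way in, while `bundleToJSON()` is a plain projection. A short sketch of the round-trip and of the failure mode (the invalid input and the exact `fields` listing are illustrative):

// Sketch only: round-trip a bundle through JSON and surface validation errors.
const { bundleToJSON, bundleFromJSON, ValidationError } = require('@sigstore/bundle')

// Reusing the bundle built in the earlier sketch.
const json = bundleToJSON(bundle)         // plain object, safe to persist
const roundTripped = bundleFromJSON(json) // re-runs assertBundle() on the way in

try {
  bundleFromJSON({ mediaType: 'application/json' }) // wrong media type, no content
} catch (e) {
  if (e instanceof ValidationError) {
    console.error(e.fields) // e.g. ['mediaType', 'content', 'verificationMaterial']
  }
}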
diff --git a/node_modules/@sigstore/bundle/dist/validate.js b/node_modules/@sigstore/bundle/dist/validate.js
new file mode 100644
index 0000000000000..015b6dfc58dd7
--- /dev/null
+++ b/node_modules/@sigstore/bundle/dist/validate.js
@@ -0,0 +1,160 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.assertBundleLatest = exports.isBundleV01 = exports.assertBundleV01 = exports.assertBundle = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const bundle_1 = require("./bundle");
+const error_1 = require("./error");
+// Performs basic validation of a Sigstore bundle to ensure that all required
+// fields are populated. This is not a complete validation of the bundle, but
+// rather a check that the bundle is in a valid state to be processed by the
+// rest of the code.
+function assertBundle(b) {
+    const invalidValues = [];
+    // Media type validation
+    if (b.mediaType === undefined ||
+        !b.mediaType.startsWith('application/vnd.dev.sigstore.bundle+json;version=')) {
+        invalidValues.push('mediaType');
+    }
+    // Content-related validation
+    if (b.content === undefined) {
+        invalidValues.push('content');
+    }
+    else {
+        switch (b.content.$case) {
+            case 'messageSignature':
+                if (b.content.messageSignature.messageDigest === undefined) {
+                    invalidValues.push('content.messageSignature.messageDigest');
+                }
+                else {
+                    if (b.content.messageSignature.messageDigest.digest.length === 0) {
+                        invalidValues.push('content.messageSignature.messageDigest.digest');
+                    }
+                }
+                if (b.content.messageSignature.signature.length === 0) {
+                    invalidValues.push('content.messageSignature.signature');
+                }
+                break;
+            case 'dsseEnvelope':
+                if (b.content.dsseEnvelope.payload.length === 0) {
+                    invalidValues.push('content.dsseEnvelope.payload');
+                }
+                if (b.content.dsseEnvelope.signatures.length !== 1) {
+                    invalidValues.push('content.dsseEnvelope.signatures');
+                }
+                else {
+                    if (b.content.dsseEnvelope.signatures[0].sig.length === 0) {
+                        invalidValues.push('content.dsseEnvelope.signatures[0].sig');
+                    }
+                }
+                break;
+        }
+    }
+    // Verification material-related validation
+    if (b.verificationMaterial === undefined) {
+        invalidValues.push('verificationMaterial');
+    }
+    else {
+        if (b.verificationMaterial.content === undefined) {
+            invalidValues.push('verificationMaterial.content');
+        }
+        else {
+            switch (b.verificationMaterial.content.$case) {
+                case 'x509CertificateChain':
+                    if (b.verificationMaterial.content.x509CertificateChain.certificates
+                        .length === 0) {
+                        invalidValues.push('verificationMaterial.content.x509CertificateChain.certificates');
+                    }
+                    b.verificationMaterial.content.x509CertificateChain.certificates.forEach((cert, i) => {
+                        if (cert.rawBytes.length === 0) {
+                            invalidValues.push(`verificationMaterial.content.x509CertificateChain.certificates[${i}].rawBytes`);
+                        }
+                    });
+                    break;
+            }
+        }
+        if (b.verificationMaterial.tlogEntries === undefined) {
+            invalidValues.push('verificationMaterial.tlogEntries');
+        }
+        else {
+            if (b.verificationMaterial.tlogEntries.length > 0) {
+                b.verificationMaterial.tlogEntries.forEach((entry, i) => {
+                    if (entry.logId === undefined) {
+                        invalidValues.push(`verificationMaterial.tlogEntries[${i}].logId`);
+                    }
+                    if (entry.kindVersion === undefined) {
+                        invalidValues.push(`verificationMaterial.tlogEntries[${i}].kindVersion`);
+                    }
+                });
+            }
+        }
+    }
+    if (invalidValues.length > 0) {
+        throw new error_1.ValidationError('invalid bundle', invalidValues);
+    }
+}
+exports.assertBundle = assertBundle;
+// Asserts that the given bundle conforms to the v0.1 bundle format.
+function assertBundleV01(b) {
+    const invalidValues = [];
+    if (b.mediaType && b.mediaType !== bundle_1.BUNDLE_V01_MEDIA_TYPE) {
+        invalidValues.push('mediaType');
+    }
+    if (b.verificationMaterial &&
+        b.verificationMaterial.tlogEntries?.length > 0) {
+        b.verificationMaterial.tlogEntries.forEach((entry, i) => {
+            if (entry.inclusionPromise === undefined) {
+                invalidValues.push(`verificationMaterial.tlogEntries[${i}].inclusionPromise`);
+            }
+        });
+    }
+    if (invalidValues.length > 0) {
+        throw new error_1.ValidationError('invalid v0.1 bundle', invalidValues);
+    }
+}
+exports.assertBundleV01 = assertBundleV01;
+// Type guard to determine if Bundle is a v0.1 bundle.
+function isBundleV01(b) {
+    try {
+        assertBundleV01(b);
+        return true;
+    }
+    catch (e) {
+        return false;
+    }
+}
+exports.isBundleV01 = isBundleV01;
+// Asserts that the given bundle conforms to the newest (0.2) bundle format.
+function assertBundleLatest(b) {
+    const invalidValues = [];
+    if (b.verificationMaterial &&
+        b.verificationMaterial.tlogEntries?.length > 0) {
+        b.verificationMaterial.tlogEntries.forEach((entry, i) => {
+            if (entry.inclusionProof === undefined) {
+                invalidValues.push(`verificationMaterial.tlogEntries[${i}].inclusionProof`);
+            }
+            else {
+                if (entry.inclusionProof.checkpoint === undefined) {
+                    invalidValues.push(`verificationMaterial.tlogEntries[${i}].inclusionProof.checkpoint`);
+                }
+            }
+        });
+    }
+    if (invalidValues.length > 0) {
+        throw new error_1.ValidationError('invalid v0.2 bundle', invalidValues);
+    }
+}
+exports.assertBundleLatest = assertBundleLatest;
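validate.js encodes the difference between the two bundle generations: v0.1 tlog entries must carry an `inclusionPromise`, while the latest (v0.2) entries must carry an `inclusionProof` with a `checkpoint`. A sketch of how a consumer might branch on that (assumed usage, not part of this diff):

// Sketch only: distinguish bundle versions with the guards defined above.
const { isBundleV01, assertBundleLatest } = require('@sigstore/bundle')

function describeBundle (b) {
  if (isBundleV01(b)) {
    return 'v0.1 bundle (tlog entries carry an inclusionPromise)'
  }
  // Throws a ValidationError if any tlog entry lacks inclusionProof.checkpoint.
  assertBundleLatest(b)
  return 'v0.2 bundle (tlog entries carry an inclusionProof with checkpoint)'
}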
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2023 The Sigstore Authors + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js new file mode 100644 index 0000000000000..0c367a8384454 --- /dev/null +++ b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js @@ -0,0 +1,89 @@ +"use strict"; +/* eslint-disable */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Signature = exports.Envelope = void 0; +function createBaseEnvelope() { + return { payload: Buffer.alloc(0), payloadType: "", signatures: [] }; +} +exports.Envelope = { + fromJSON(object) { + return { + payload: isSet(object.payload) ? Buffer.from(bytesFromBase64(object.payload)) : Buffer.alloc(0), + payloadType: isSet(object.payloadType) ? String(object.payloadType) : "", + signatures: Array.isArray(object?.signatures) ? 
object.signatures.map((e) => exports.Signature.fromJSON(e)) : [], + }; + }, + toJSON(message) { + const obj = {}; + message.payload !== undefined && + (obj.payload = base64FromBytes(message.payload !== undefined ? message.payload : Buffer.alloc(0))); + message.payloadType !== undefined && (obj.payloadType = message.payloadType); + if (message.signatures) { + obj.signatures = message.signatures.map((e) => e ? exports.Signature.toJSON(e) : undefined); + } + else { + obj.signatures = []; + } + return obj; + }, +}; +function createBaseSignature() { + return { sig: Buffer.alloc(0), keyid: "" }; +} +exports.Signature = { + fromJSON(object) { + return { + sig: isSet(object.sig) ? Buffer.from(bytesFromBase64(object.sig)) : Buffer.alloc(0), + keyid: isSet(object.keyid) ? String(object.keyid) : "", + }; + }, + toJSON(message) { + const obj = {}; + message.sig !== undefined && (obj.sig = base64FromBytes(message.sig !== undefined ? message.sig : Buffer.alloc(0))); + message.keyid !== undefined && (obj.keyid = message.keyid); + return obj; + }, +}; +var tsProtoGlobalThis = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); +function bytesFromBase64(b64) { + if (tsProtoGlobalThis.Buffer) { + return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64")); + } + else { + const bin = tsProtoGlobalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} +function base64FromBytes(arr) { + if (tsProtoGlobalThis.Buffer) { + return tsProtoGlobalThis.Buffer.from(arr).toString("base64"); + } + else { + const bin = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return tsProtoGlobalThis.btoa(bin.join("")); + } +} +function isSet(value) { + return value !== null && value !== undefined; +} diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js new file mode 100644 index 0000000000000..073093b8371a8 --- /dev/null +++ b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js @@ -0,0 +1,185 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.CloudEventBatch = exports.CloudEvent_CloudEventAttributeValue = exports.CloudEvent_AttributesEntry = exports.CloudEvent = void 0; +/* eslint-disable */ +const any_1 = require("./google/protobuf/any"); +const timestamp_1 = require("./google/protobuf/timestamp"); +function createBaseCloudEvent() { + return { id: "", source: "", specVersion: "", type: "", attributes: {}, data: undefined }; +} +exports.CloudEvent = { + fromJSON(object) { + return { + id: isSet(object.id) ? String(object.id) : "", + source: isSet(object.source) ? String(object.source) : "", + specVersion: isSet(object.specVersion) ? String(object.specVersion) : "", + type: isSet(object.type) ? String(object.type) : "", + attributes: isObject(object.attributes) + ? Object.entries(object.attributes).reduce((acc, [key, value]) => { + acc[key] = exports.CloudEvent_CloudEventAttributeValue.fromJSON(value); + return acc; + }, {}) + : {}, + data: isSet(object.binaryData) + ? 
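The generated codec maps every protobuf bytes field to a base64 string in JSON form and back to a Buffer in memory. A quick sketch against the `Envelope` type above (payload values are placeholders; assumed usage, not part of this diff):

// Sketch only: base64 round-trip through the generated Envelope codec.
const { Envelope } = require('@sigstore/protobuf-specs')

const envelope = Envelope.fromJSON({
  payload: Buffer.from('{"subject":"demo"}').toString('base64'),
  payloadType: 'application/vnd.in-toto+json',
  signatures: [{ sig: Buffer.from('sig-bytes').toString('base64'), keyid: '' }],
})

envelope.payload.toString()       // '{"subject":"demo"}' -- decoded to bytes
Envelope.toJSON(envelope).payload // base64 string again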
{ $case: "binaryData", binaryData: Buffer.from(bytesFromBase64(object.binaryData)) } + : isSet(object.textData) + ? { $case: "textData", textData: String(object.textData) } + : isSet(object.protoData) + ? { $case: "protoData", protoData: any_1.Any.fromJSON(object.protoData) } + : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.id !== undefined && (obj.id = message.id); + message.source !== undefined && (obj.source = message.source); + message.specVersion !== undefined && (obj.specVersion = message.specVersion); + message.type !== undefined && (obj.type = message.type); + obj.attributes = {}; + if (message.attributes) { + Object.entries(message.attributes).forEach(([k, v]) => { + obj.attributes[k] = exports.CloudEvent_CloudEventAttributeValue.toJSON(v); + }); + } + message.data?.$case === "binaryData" && + (obj.binaryData = message.data?.binaryData !== undefined ? base64FromBytes(message.data?.binaryData) : undefined); + message.data?.$case === "textData" && (obj.textData = message.data?.textData); + message.data?.$case === "protoData" && + (obj.protoData = message.data?.protoData ? any_1.Any.toJSON(message.data?.protoData) : undefined); + return obj; + }, +}; +function createBaseCloudEvent_AttributesEntry() { + return { key: "", value: undefined }; +} +exports.CloudEvent_AttributesEntry = { + fromJSON(object) { + return { + key: isSet(object.key) ? String(object.key) : "", + value: isSet(object.value) ? exports.CloudEvent_CloudEventAttributeValue.fromJSON(object.value) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.key !== undefined && (obj.key = message.key); + message.value !== undefined && + (obj.value = message.value ? exports.CloudEvent_CloudEventAttributeValue.toJSON(message.value) : undefined); + return obj; + }, +}; +function createBaseCloudEvent_CloudEventAttributeValue() { + return { attr: undefined }; +} +exports.CloudEvent_CloudEventAttributeValue = { + fromJSON(object) { + return { + attr: isSet(object.ceBoolean) + ? { $case: "ceBoolean", ceBoolean: Boolean(object.ceBoolean) } + : isSet(object.ceInteger) + ? { $case: "ceInteger", ceInteger: Number(object.ceInteger) } + : isSet(object.ceString) + ? { $case: "ceString", ceString: String(object.ceString) } + : isSet(object.ceBytes) + ? { $case: "ceBytes", ceBytes: Buffer.from(bytesFromBase64(object.ceBytes)) } + : isSet(object.ceUri) + ? { $case: "ceUri", ceUri: String(object.ceUri) } + : isSet(object.ceUriRef) + ? { $case: "ceUriRef", ceUriRef: String(object.ceUriRef) } + : isSet(object.ceTimestamp) + ? { $case: "ceTimestamp", ceTimestamp: fromJsonTimestamp(object.ceTimestamp) } + : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.attr?.$case === "ceBoolean" && (obj.ceBoolean = message.attr?.ceBoolean); + message.attr?.$case === "ceInteger" && (obj.ceInteger = Math.round(message.attr?.ceInteger)); + message.attr?.$case === "ceString" && (obj.ceString = message.attr?.ceString); + message.attr?.$case === "ceBytes" && + (obj.ceBytes = message.attr?.ceBytes !== undefined ? base64FromBytes(message.attr?.ceBytes) : undefined); + message.attr?.$case === "ceUri" && (obj.ceUri = message.attr?.ceUri); + message.attr?.$case === "ceUriRef" && (obj.ceUriRef = message.attr?.ceUriRef); + message.attr?.$case === "ceTimestamp" && (obj.ceTimestamp = message.attr?.ceTimestamp.toISOString()); + return obj; + }, +}; +function createBaseCloudEventBatch() { + return { events: [] }; +} +exports.CloudEventBatch = { + fromJSON(object) { + return { events: Array.isArray(object?.events) ? 
object.events.map((e) => exports.CloudEvent.fromJSON(e)) : [] }; + }, + toJSON(message) { + const obj = {}; + if (message.events) { + obj.events = message.events.map((e) => e ? exports.CloudEvent.toJSON(e) : undefined); + } + else { + obj.events = []; + } + return obj; + }, +}; +var tsProtoGlobalThis = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); +function bytesFromBase64(b64) { + if (tsProtoGlobalThis.Buffer) { + return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64")); + } + else { + const bin = tsProtoGlobalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} +function base64FromBytes(arr) { + if (tsProtoGlobalThis.Buffer) { + return tsProtoGlobalThis.Buffer.from(arr).toString("base64"); + } + else { + const bin = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return tsProtoGlobalThis.btoa(bin.join("")); + } +} +function fromTimestamp(t) { + let millis = Number(t.seconds) * 1000; + millis += t.nanos / 1000000; + return new Date(millis); +} +function fromJsonTimestamp(o) { + if (o instanceof Date) { + return o; + } + else if (typeof o === "string") { + return new Date(o); + } + else { + return fromTimestamp(timestamp_1.Timestamp.fromJSON(o)); + } +} +function isObject(value) { + return typeof value === "object" && value !== null; +} +function isSet(value) { + return value !== null && value !== undefined; +} diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js new file mode 100644 index 0000000000000..da627499ad765 --- /dev/null +++ b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js @@ -0,0 +1,119 @@ +"use strict"; +/* eslint-disable */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.fieldBehaviorToJSON = exports.fieldBehaviorFromJSON = exports.FieldBehavior = void 0; +/** + * An indicator of the behavior of a given field (for example, that a field + * is required in requests, or given as output but ignored as input). + * This **does not** change the behavior in protocol buffers itself; it only + * denotes the behavior and may affect how API tooling handles the field. + * + * Note: This enum **may** receive new values in the future. + */ +var FieldBehavior; +(function (FieldBehavior) { + /** FIELD_BEHAVIOR_UNSPECIFIED - Conventional default for enums. Do not use this. */ + FieldBehavior[FieldBehavior["FIELD_BEHAVIOR_UNSPECIFIED"] = 0] = "FIELD_BEHAVIOR_UNSPECIFIED"; + /** + * OPTIONAL - Specifically denotes a field as optional. + * While all fields in protocol buffers are optional, this may be specified + * for emphasis if appropriate. + */ + FieldBehavior[FieldBehavior["OPTIONAL"] = 1] = "OPTIONAL"; + /** + * REQUIRED - Denotes a field as required. + * This indicates that the field **must** be provided as part of the request, + * and failure to do so will cause an error (usually `INVALID_ARGUMENT`). 
+ */ + FieldBehavior[FieldBehavior["REQUIRED"] = 2] = "REQUIRED"; + /** + * OUTPUT_ONLY - Denotes a field as output only. + * This indicates that the field is provided in responses, but including the + * field in a request does nothing (the server *must* ignore it and + * *must not* throw an error as a result of the field's presence). + */ + FieldBehavior[FieldBehavior["OUTPUT_ONLY"] = 3] = "OUTPUT_ONLY"; + /** + * INPUT_ONLY - Denotes a field as input only. + * This indicates that the field is provided in requests, and the + * corresponding field is not included in output. + */ + FieldBehavior[FieldBehavior["INPUT_ONLY"] = 4] = "INPUT_ONLY"; + /** + * IMMUTABLE - Denotes a field as immutable. + * This indicates that the field may be set once in a request to create a + * resource, but may not be changed thereafter. + */ + FieldBehavior[FieldBehavior["IMMUTABLE"] = 5] = "IMMUTABLE"; + /** + * UNORDERED_LIST - Denotes that a (repeated) field is an unordered list. + * This indicates that the service may provide the elements of the list + * in any arbitrary order, rather than the order the user originally + * provided. Additionally, the list's order may or may not be stable. + */ + FieldBehavior[FieldBehavior["UNORDERED_LIST"] = 6] = "UNORDERED_LIST"; +})(FieldBehavior = exports.FieldBehavior || (exports.FieldBehavior = {})); +function fieldBehaviorFromJSON(object) { + switch (object) { + case 0: + case "FIELD_BEHAVIOR_UNSPECIFIED": + return FieldBehavior.FIELD_BEHAVIOR_UNSPECIFIED; + case 1: + case "OPTIONAL": + return FieldBehavior.OPTIONAL; + case 2: + case "REQUIRED": + return FieldBehavior.REQUIRED; + case 3: + case "OUTPUT_ONLY": + return FieldBehavior.OUTPUT_ONLY; + case 4: + case "INPUT_ONLY": + return FieldBehavior.INPUT_ONLY; + case 5: + case "IMMUTABLE": + return FieldBehavior.IMMUTABLE; + case 6: + case "UNORDERED_LIST": + return FieldBehavior.UNORDERED_LIST; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior"); + } +} +exports.fieldBehaviorFromJSON = fieldBehaviorFromJSON; +function fieldBehaviorToJSON(object) { + switch (object) { + case FieldBehavior.FIELD_BEHAVIOR_UNSPECIFIED: + return "FIELD_BEHAVIOR_UNSPECIFIED"; + case FieldBehavior.OPTIONAL: + return "OPTIONAL"; + case FieldBehavior.REQUIRED: + return "REQUIRED"; + case FieldBehavior.OUTPUT_ONLY: + return "OUTPUT_ONLY"; + case FieldBehavior.INPUT_ONLY: + return "INPUT_ONLY"; + case FieldBehavior.IMMUTABLE: + return "IMMUTABLE"; + case FieldBehavior.UNORDERED_LIST: + return "UNORDERED_LIST"; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior"); + } +} +exports.fieldBehaviorToJSON = fieldBehaviorToJSON; +var tsProtoGlobalThis = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js new file mode 100644 index 0000000000000..6b3f3c97a6647 --- /dev/null +++ b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js @@ -0,0 +1,65 @@ +"use strict"; +/* eslint-disable */ 
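The generated enum helpers accept either the numeric wire value or the string name and throw on anything else. A tiny sketch (the deep require path points at the generated file added above; whether the package re-exports these helpers at its top level is not shown in this diff):

// Sketch only: generated enum conversion helpers.
const {
  FieldBehavior,
  fieldBehaviorFromJSON,
  fieldBehaviorToJSON,
} = require('@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior')

fieldBehaviorFromJSON('REQUIRED')              // 2 (FieldBehavior.REQUIRED)
fieldBehaviorFromJSON(2)                       // 2 -- numbers map the same way
fieldBehaviorToJSON(FieldBehavior.OUTPUT_ONLY) // 'OUTPUT_ONLY'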
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.Any = void 0; +function createBaseAny() { + return { typeUrl: "", value: Buffer.alloc(0) }; +} +exports.Any = { + fromJSON(object) { + return { + typeUrl: isSet(object.typeUrl) ? String(object.typeUrl) : "", + value: isSet(object.value) ? Buffer.from(bytesFromBase64(object.value)) : Buffer.alloc(0), + }; + }, + toJSON(message) { + const obj = {}; + message.typeUrl !== undefined && (obj.typeUrl = message.typeUrl); + message.value !== undefined && + (obj.value = base64FromBytes(message.value !== undefined ? message.value : Buffer.alloc(0))); + return obj; + }, +}; +var tsProtoGlobalThis = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); +function bytesFromBase64(b64) { + if (tsProtoGlobalThis.Buffer) { + return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64")); + } + else { + const bin = tsProtoGlobalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} +function base64FromBytes(arr) { + if (tsProtoGlobalThis.Buffer) { + return tsProtoGlobalThis.Buffer.from(arr).toString("base64"); + } + else { + const bin = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return tsProtoGlobalThis.btoa(bin.join("")); + } +} +function isSet(value) { + return value !== null && value !== undefined; +} diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js new file mode 100644 index 0000000000000..d429aac846043 --- /dev/null +++ b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js @@ -0,0 +1,1308 @@ +"use strict"; +/* eslint-disable */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.GeneratedCodeInfo_Annotation = exports.GeneratedCodeInfo = exports.SourceCodeInfo_Location = exports.SourceCodeInfo = exports.UninterpretedOption_NamePart = exports.UninterpretedOption = exports.MethodOptions = exports.ServiceOptions = exports.EnumValueOptions = exports.EnumOptions = exports.OneofOptions = exports.FieldOptions = exports.MessageOptions = exports.FileOptions = exports.MethodDescriptorProto = exports.ServiceDescriptorProto = exports.EnumValueDescriptorProto = exports.EnumDescriptorProto_EnumReservedRange = exports.EnumDescriptorProto = exports.OneofDescriptorProto = exports.FieldDescriptorProto = exports.ExtensionRangeOptions = exports.DescriptorProto_ReservedRange = exports.DescriptorProto_ExtensionRange = exports.DescriptorProto = exports.FileDescriptorProto = exports.FileDescriptorSet = exports.methodOptions_IdempotencyLevelToJSON = exports.methodOptions_IdempotencyLevelFromJSON = exports.MethodOptions_IdempotencyLevel = exports.fieldOptions_JSTypeToJSON = exports.fieldOptions_JSTypeFromJSON = exports.FieldOptions_JSType = exports.fieldOptions_CTypeToJSON = exports.fieldOptions_CTypeFromJSON = exports.FieldOptions_CType = exports.fileOptions_OptimizeModeToJSON = exports.fileOptions_OptimizeModeFromJSON = exports.FileOptions_OptimizeMode = exports.fieldDescriptorProto_LabelToJSON = 
exports.fieldDescriptorProto_LabelFromJSON = exports.FieldDescriptorProto_Label = exports.fieldDescriptorProto_TypeToJSON = exports.fieldDescriptorProto_TypeFromJSON = exports.FieldDescriptorProto_Type = void 0; +var FieldDescriptorProto_Type; +(function (FieldDescriptorProto_Type) { + /** + * TYPE_DOUBLE - 0 is reserved for errors. + * Order is weird for historical reasons. + */ + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_DOUBLE"] = 1] = "TYPE_DOUBLE"; + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FLOAT"] = 2] = "TYPE_FLOAT"; + /** + * TYPE_INT64 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if + * negative values are likely. + */ + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_INT64"] = 3] = "TYPE_INT64"; + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_UINT64"] = 4] = "TYPE_UINT64"; + /** + * TYPE_INT32 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if + * negative values are likely. + */ + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_INT32"] = 5] = "TYPE_INT32"; + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FIXED64"] = 6] = "TYPE_FIXED64"; + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FIXED32"] = 7] = "TYPE_FIXED32"; + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_BOOL"] = 8] = "TYPE_BOOL"; + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_STRING"] = 9] = "TYPE_STRING"; + /** + * TYPE_GROUP - Tag-delimited aggregate. + * Group type is deprecated and not supported in proto3. However, Proto3 + * implementations should still be able to parse the group wire format and + * treat group fields as unknown fields. + */ + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_GROUP"] = 10] = "TYPE_GROUP"; + /** TYPE_MESSAGE - Length-delimited aggregate. */ + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_MESSAGE"] = 11] = "TYPE_MESSAGE"; + /** TYPE_BYTES - New in version 2. */ + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_BYTES"] = 12] = "TYPE_BYTES"; + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_UINT32"] = 13] = "TYPE_UINT32"; + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_ENUM"] = 14] = "TYPE_ENUM"; + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SFIXED32"] = 15] = "TYPE_SFIXED32"; + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SFIXED64"] = 16] = "TYPE_SFIXED64"; + /** TYPE_SINT32 - Uses ZigZag encoding. */ + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SINT32"] = 17] = "TYPE_SINT32"; + /** TYPE_SINT64 - Uses ZigZag encoding. 
*/ + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SINT64"] = 18] = "TYPE_SINT64"; +})(FieldDescriptorProto_Type = exports.FieldDescriptorProto_Type || (exports.FieldDescriptorProto_Type = {})); +function fieldDescriptorProto_TypeFromJSON(object) { + switch (object) { + case 1: + case "TYPE_DOUBLE": + return FieldDescriptorProto_Type.TYPE_DOUBLE; + case 2: + case "TYPE_FLOAT": + return FieldDescriptorProto_Type.TYPE_FLOAT; + case 3: + case "TYPE_INT64": + return FieldDescriptorProto_Type.TYPE_INT64; + case 4: + case "TYPE_UINT64": + return FieldDescriptorProto_Type.TYPE_UINT64; + case 5: + case "TYPE_INT32": + return FieldDescriptorProto_Type.TYPE_INT32; + case 6: + case "TYPE_FIXED64": + return FieldDescriptorProto_Type.TYPE_FIXED64; + case 7: + case "TYPE_FIXED32": + return FieldDescriptorProto_Type.TYPE_FIXED32; + case 8: + case "TYPE_BOOL": + return FieldDescriptorProto_Type.TYPE_BOOL; + case 9: + case "TYPE_STRING": + return FieldDescriptorProto_Type.TYPE_STRING; + case 10: + case "TYPE_GROUP": + return FieldDescriptorProto_Type.TYPE_GROUP; + case 11: + case "TYPE_MESSAGE": + return FieldDescriptorProto_Type.TYPE_MESSAGE; + case 12: + case "TYPE_BYTES": + return FieldDescriptorProto_Type.TYPE_BYTES; + case 13: + case "TYPE_UINT32": + return FieldDescriptorProto_Type.TYPE_UINT32; + case 14: + case "TYPE_ENUM": + return FieldDescriptorProto_Type.TYPE_ENUM; + case 15: + case "TYPE_SFIXED32": + return FieldDescriptorProto_Type.TYPE_SFIXED32; + case 16: + case "TYPE_SFIXED64": + return FieldDescriptorProto_Type.TYPE_SFIXED64; + case 17: + case "TYPE_SINT32": + return FieldDescriptorProto_Type.TYPE_SINT32; + case 18: + case "TYPE_SINT64": + return FieldDescriptorProto_Type.TYPE_SINT64; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type"); + } +} +exports.fieldDescriptorProto_TypeFromJSON = fieldDescriptorProto_TypeFromJSON; +function fieldDescriptorProto_TypeToJSON(object) { + switch (object) { + case FieldDescriptorProto_Type.TYPE_DOUBLE: + return "TYPE_DOUBLE"; + case FieldDescriptorProto_Type.TYPE_FLOAT: + return "TYPE_FLOAT"; + case FieldDescriptorProto_Type.TYPE_INT64: + return "TYPE_INT64"; + case FieldDescriptorProto_Type.TYPE_UINT64: + return "TYPE_UINT64"; + case FieldDescriptorProto_Type.TYPE_INT32: + return "TYPE_INT32"; + case FieldDescriptorProto_Type.TYPE_FIXED64: + return "TYPE_FIXED64"; + case FieldDescriptorProto_Type.TYPE_FIXED32: + return "TYPE_FIXED32"; + case FieldDescriptorProto_Type.TYPE_BOOL: + return "TYPE_BOOL"; + case FieldDescriptorProto_Type.TYPE_STRING: + return "TYPE_STRING"; + case FieldDescriptorProto_Type.TYPE_GROUP: + return "TYPE_GROUP"; + case FieldDescriptorProto_Type.TYPE_MESSAGE: + return "TYPE_MESSAGE"; + case FieldDescriptorProto_Type.TYPE_BYTES: + return "TYPE_BYTES"; + case FieldDescriptorProto_Type.TYPE_UINT32: + return "TYPE_UINT32"; + case FieldDescriptorProto_Type.TYPE_ENUM: + return "TYPE_ENUM"; + case FieldDescriptorProto_Type.TYPE_SFIXED32: + return "TYPE_SFIXED32"; + case FieldDescriptorProto_Type.TYPE_SFIXED64: + return "TYPE_SFIXED64"; + case FieldDescriptorProto_Type.TYPE_SINT32: + return "TYPE_SINT32"; + case FieldDescriptorProto_Type.TYPE_SINT64: + return "TYPE_SINT64"; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type"); + } +} +exports.fieldDescriptorProto_TypeToJSON = fieldDescriptorProto_TypeToJSON; +var FieldDescriptorProto_Label; +(function (FieldDescriptorProto_Label) { + /** 
LABEL_OPTIONAL - 0 is reserved for errors */ + FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_OPTIONAL"] = 1] = "LABEL_OPTIONAL"; + FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_REQUIRED"] = 2] = "LABEL_REQUIRED"; + FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_REPEATED"] = 3] = "LABEL_REPEATED"; +})(FieldDescriptorProto_Label = exports.FieldDescriptorProto_Label || (exports.FieldDescriptorProto_Label = {})); +function fieldDescriptorProto_LabelFromJSON(object) { + switch (object) { + case 1: + case "LABEL_OPTIONAL": + return FieldDescriptorProto_Label.LABEL_OPTIONAL; + case 2: + case "LABEL_REQUIRED": + return FieldDescriptorProto_Label.LABEL_REQUIRED; + case 3: + case "LABEL_REPEATED": + return FieldDescriptorProto_Label.LABEL_REPEATED; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label"); + } +} +exports.fieldDescriptorProto_LabelFromJSON = fieldDescriptorProto_LabelFromJSON; +function fieldDescriptorProto_LabelToJSON(object) { + switch (object) { + case FieldDescriptorProto_Label.LABEL_OPTIONAL: + return "LABEL_OPTIONAL"; + case FieldDescriptorProto_Label.LABEL_REQUIRED: + return "LABEL_REQUIRED"; + case FieldDescriptorProto_Label.LABEL_REPEATED: + return "LABEL_REPEATED"; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label"); + } +} +exports.fieldDescriptorProto_LabelToJSON = fieldDescriptorProto_LabelToJSON; +/** Generated classes can be optimized for speed or code size. */ +var FileOptions_OptimizeMode; +(function (FileOptions_OptimizeMode) { + /** SPEED - Generate complete code for parsing, serialization, */ + FileOptions_OptimizeMode[FileOptions_OptimizeMode["SPEED"] = 1] = "SPEED"; + /** CODE_SIZE - etc. */ + FileOptions_OptimizeMode[FileOptions_OptimizeMode["CODE_SIZE"] = 2] = "CODE_SIZE"; + /** LITE_RUNTIME - Generate code using MessageLite and the lite runtime. */ + FileOptions_OptimizeMode[FileOptions_OptimizeMode["LITE_RUNTIME"] = 3] = "LITE_RUNTIME"; +})(FileOptions_OptimizeMode = exports.FileOptions_OptimizeMode || (exports.FileOptions_OptimizeMode = {})); +function fileOptions_OptimizeModeFromJSON(object) { + switch (object) { + case 1: + case "SPEED": + return FileOptions_OptimizeMode.SPEED; + case 2: + case "CODE_SIZE": + return FileOptions_OptimizeMode.CODE_SIZE; + case 3: + case "LITE_RUNTIME": + return FileOptions_OptimizeMode.LITE_RUNTIME; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode"); + } +} +exports.fileOptions_OptimizeModeFromJSON = fileOptions_OptimizeModeFromJSON; +function fileOptions_OptimizeModeToJSON(object) { + switch (object) { + case FileOptions_OptimizeMode.SPEED: + return "SPEED"; + case FileOptions_OptimizeMode.CODE_SIZE: + return "CODE_SIZE"; + case FileOptions_OptimizeMode.LITE_RUNTIME: + return "LITE_RUNTIME"; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode"); + } +} +exports.fileOptions_OptimizeModeToJSON = fileOptions_OptimizeModeToJSON; +var FieldOptions_CType; +(function (FieldOptions_CType) { + /** STRING - Default mode. 
*/ + FieldOptions_CType[FieldOptions_CType["STRING"] = 0] = "STRING"; + FieldOptions_CType[FieldOptions_CType["CORD"] = 1] = "CORD"; + FieldOptions_CType[FieldOptions_CType["STRING_PIECE"] = 2] = "STRING_PIECE"; +})(FieldOptions_CType = exports.FieldOptions_CType || (exports.FieldOptions_CType = {})); +function fieldOptions_CTypeFromJSON(object) { + switch (object) { + case 0: + case "STRING": + return FieldOptions_CType.STRING; + case 1: + case "CORD": + return FieldOptions_CType.CORD; + case 2: + case "STRING_PIECE": + return FieldOptions_CType.STRING_PIECE; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType"); + } +} +exports.fieldOptions_CTypeFromJSON = fieldOptions_CTypeFromJSON; +function fieldOptions_CTypeToJSON(object) { + switch (object) { + case FieldOptions_CType.STRING: + return "STRING"; + case FieldOptions_CType.CORD: + return "CORD"; + case FieldOptions_CType.STRING_PIECE: + return "STRING_PIECE"; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType"); + } +} +exports.fieldOptions_CTypeToJSON = fieldOptions_CTypeToJSON; +var FieldOptions_JSType; +(function (FieldOptions_JSType) { + /** JS_NORMAL - Use the default type. */ + FieldOptions_JSType[FieldOptions_JSType["JS_NORMAL"] = 0] = "JS_NORMAL"; + /** JS_STRING - Use JavaScript strings. */ + FieldOptions_JSType[FieldOptions_JSType["JS_STRING"] = 1] = "JS_STRING"; + /** JS_NUMBER - Use JavaScript numbers. */ + FieldOptions_JSType[FieldOptions_JSType["JS_NUMBER"] = 2] = "JS_NUMBER"; +})(FieldOptions_JSType = exports.FieldOptions_JSType || (exports.FieldOptions_JSType = {})); +function fieldOptions_JSTypeFromJSON(object) { + switch (object) { + case 0: + case "JS_NORMAL": + return FieldOptions_JSType.JS_NORMAL; + case 1: + case "JS_STRING": + return FieldOptions_JSType.JS_STRING; + case 2: + case "JS_NUMBER": + return FieldOptions_JSType.JS_NUMBER; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType"); + } +} +exports.fieldOptions_JSTypeFromJSON = fieldOptions_JSTypeFromJSON; +function fieldOptions_JSTypeToJSON(object) { + switch (object) { + case FieldOptions_JSType.JS_NORMAL: + return "JS_NORMAL"; + case FieldOptions_JSType.JS_STRING: + return "JS_STRING"; + case FieldOptions_JSType.JS_NUMBER: + return "JS_NUMBER"; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType"); + } +} +exports.fieldOptions_JSTypeToJSON = fieldOptions_JSTypeToJSON; +/** + * Is this method side-effect-free (or safe in HTTP parlance), or idempotent, + * or neither? HTTP based RPC implementation may choose GET verb for safe + * methods, and PUT verb for idempotent methods instead of the default POST. 
+ */ +var MethodOptions_IdempotencyLevel; +(function (MethodOptions_IdempotencyLevel) { + MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["IDEMPOTENCY_UNKNOWN"] = 0] = "IDEMPOTENCY_UNKNOWN"; + /** NO_SIDE_EFFECTS - implies idempotent */ + MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["NO_SIDE_EFFECTS"] = 1] = "NO_SIDE_EFFECTS"; + /** IDEMPOTENT - idempotent, but may have side effects */ + MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["IDEMPOTENT"] = 2] = "IDEMPOTENT"; +})(MethodOptions_IdempotencyLevel = exports.MethodOptions_IdempotencyLevel || (exports.MethodOptions_IdempotencyLevel = {})); +function methodOptions_IdempotencyLevelFromJSON(object) { + switch (object) { + case 0: + case "IDEMPOTENCY_UNKNOWN": + return MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN; + case 1: + case "NO_SIDE_EFFECTS": + return MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS; + case 2: + case "IDEMPOTENT": + return MethodOptions_IdempotencyLevel.IDEMPOTENT; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel"); + } +} +exports.methodOptions_IdempotencyLevelFromJSON = methodOptions_IdempotencyLevelFromJSON; +function methodOptions_IdempotencyLevelToJSON(object) { + switch (object) { + case MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN: + return "IDEMPOTENCY_UNKNOWN"; + case MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS: + return "NO_SIDE_EFFECTS"; + case MethodOptions_IdempotencyLevel.IDEMPOTENT: + return "IDEMPOTENT"; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel"); + } +} +exports.methodOptions_IdempotencyLevelToJSON = methodOptions_IdempotencyLevelToJSON; +function createBaseFileDescriptorSet() { + return { file: [] }; +} +exports.FileDescriptorSet = { + fromJSON(object) { + return { file: Array.isArray(object?.file) ? object.file.map((e) => exports.FileDescriptorProto.fromJSON(e)) : [] }; + }, + toJSON(message) { + const obj = {}; + if (message.file) { + obj.file = message.file.map((e) => e ? exports.FileDescriptorProto.toJSON(e) : undefined); + } + else { + obj.file = []; + } + return obj; + }, +}; +function createBaseFileDescriptorProto() { + return { + name: "", + package: "", + dependency: [], + publicDependency: [], + weakDependency: [], + messageType: [], + enumType: [], + service: [], + extension: [], + options: undefined, + sourceCodeInfo: undefined, + syntax: "", + }; +} +exports.FileDescriptorProto = { + fromJSON(object) { + return { + name: isSet(object.name) ? String(object.name) : "", + package: isSet(object.package) ? String(object.package) : "", + dependency: Array.isArray(object?.dependency) ? object.dependency.map((e) => String(e)) : [], + publicDependency: Array.isArray(object?.publicDependency) + ? object.publicDependency.map((e) => Number(e)) + : [], + weakDependency: Array.isArray(object?.weakDependency) ? object.weakDependency.map((e) => Number(e)) : [], + messageType: Array.isArray(object?.messageType) + ? object.messageType.map((e) => exports.DescriptorProto.fromJSON(e)) + : [], + enumType: Array.isArray(object?.enumType) ? object.enumType.map((e) => exports.EnumDescriptorProto.fromJSON(e)) : [], + service: Array.isArray(object?.service) ? object.service.map((e) => exports.ServiceDescriptorProto.fromJSON(e)) : [], + extension: Array.isArray(object?.extension) + ? object.extension.map((e) => exports.FieldDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) ? 
exports.FileOptions.fromJSON(object.options) : undefined, + sourceCodeInfo: isSet(object.sourceCodeInfo) ? exports.SourceCodeInfo.fromJSON(object.sourceCodeInfo) : undefined, + syntax: isSet(object.syntax) ? String(object.syntax) : "", + }; + }, + toJSON(message) { + const obj = {}; + message.name !== undefined && (obj.name = message.name); + message.package !== undefined && (obj.package = message.package); + if (message.dependency) { + obj.dependency = message.dependency.map((e) => e); + } + else { + obj.dependency = []; + } + if (message.publicDependency) { + obj.publicDependency = message.publicDependency.map((e) => Math.round(e)); + } + else { + obj.publicDependency = []; + } + if (message.weakDependency) { + obj.weakDependency = message.weakDependency.map((e) => Math.round(e)); + } + else { + obj.weakDependency = []; + } + if (message.messageType) { + obj.messageType = message.messageType.map((e) => e ? exports.DescriptorProto.toJSON(e) : undefined); + } + else { + obj.messageType = []; + } + if (message.enumType) { + obj.enumType = message.enumType.map((e) => e ? exports.EnumDescriptorProto.toJSON(e) : undefined); + } + else { + obj.enumType = []; + } + if (message.service) { + obj.service = message.service.map((e) => e ? exports.ServiceDescriptorProto.toJSON(e) : undefined); + } + else { + obj.service = []; + } + if (message.extension) { + obj.extension = message.extension.map((e) => e ? exports.FieldDescriptorProto.toJSON(e) : undefined); + } + else { + obj.extension = []; + } + message.options !== undefined && (obj.options = message.options ? exports.FileOptions.toJSON(message.options) : undefined); + message.sourceCodeInfo !== undefined && + (obj.sourceCodeInfo = message.sourceCodeInfo ? exports.SourceCodeInfo.toJSON(message.sourceCodeInfo) : undefined); + message.syntax !== undefined && (obj.syntax = message.syntax); + return obj; + }, +}; +function createBaseDescriptorProto() { + return { + name: "", + field: [], + extension: [], + nestedType: [], + enumType: [], + extensionRange: [], + oneofDecl: [], + options: undefined, + reservedRange: [], + reservedName: [], + }; +} +exports.DescriptorProto = { + fromJSON(object) { + return { + name: isSet(object.name) ? String(object.name) : "", + field: Array.isArray(object?.field) ? object.field.map((e) => exports.FieldDescriptorProto.fromJSON(e)) : [], + extension: Array.isArray(object?.extension) + ? object.extension.map((e) => exports.FieldDescriptorProto.fromJSON(e)) + : [], + nestedType: Array.isArray(object?.nestedType) + ? object.nestedType.map((e) => exports.DescriptorProto.fromJSON(e)) + : [], + enumType: Array.isArray(object?.enumType) ? object.enumType.map((e) => exports.EnumDescriptorProto.fromJSON(e)) : [], + extensionRange: Array.isArray(object?.extensionRange) + ? object.extensionRange.map((e) => exports.DescriptorProto_ExtensionRange.fromJSON(e)) + : [], + oneofDecl: Array.isArray(object?.oneofDecl) + ? object.oneofDecl.map((e) => exports.OneofDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) ? exports.MessageOptions.fromJSON(object.options) : undefined, + reservedRange: Array.isArray(object?.reservedRange) + ? object.reservedRange.map((e) => exports.DescriptorProto_ReservedRange.fromJSON(e)) + : [], + reservedName: Array.isArray(object?.reservedName) ? object.reservedName.map((e) => String(e)) : [], + }; + }, + toJSON(message) { + const obj = {}; + message.name !== undefined && (obj.name = message.name); + if (message.field) { + obj.field = message.field.map((e) => e ? 
exports.FieldDescriptorProto.toJSON(e) : undefined); + } + else { + obj.field = []; + } + if (message.extension) { + obj.extension = message.extension.map((e) => e ? exports.FieldDescriptorProto.toJSON(e) : undefined); + } + else { + obj.extension = []; + } + if (message.nestedType) { + obj.nestedType = message.nestedType.map((e) => e ? exports.DescriptorProto.toJSON(e) : undefined); + } + else { + obj.nestedType = []; + } + if (message.enumType) { + obj.enumType = message.enumType.map((e) => e ? exports.EnumDescriptorProto.toJSON(e) : undefined); + } + else { + obj.enumType = []; + } + if (message.extensionRange) { + obj.extensionRange = message.extensionRange.map((e) => e ? exports.DescriptorProto_ExtensionRange.toJSON(e) : undefined); + } + else { + obj.extensionRange = []; + } + if (message.oneofDecl) { + obj.oneofDecl = message.oneofDecl.map((e) => e ? exports.OneofDescriptorProto.toJSON(e) : undefined); + } + else { + obj.oneofDecl = []; + } + message.options !== undefined && + (obj.options = message.options ? exports.MessageOptions.toJSON(message.options) : undefined); + if (message.reservedRange) { + obj.reservedRange = message.reservedRange.map((e) => e ? exports.DescriptorProto_ReservedRange.toJSON(e) : undefined); + } + else { + obj.reservedRange = []; + } + if (message.reservedName) { + obj.reservedName = message.reservedName.map((e) => e); + } + else { + obj.reservedName = []; + } + return obj; + }, +}; +function createBaseDescriptorProto_ExtensionRange() { + return { start: 0, end: 0, options: undefined }; +} +exports.DescriptorProto_ExtensionRange = { + fromJSON(object) { + return { + start: isSet(object.start) ? Number(object.start) : 0, + end: isSet(object.end) ? Number(object.end) : 0, + options: isSet(object.options) ? exports.ExtensionRangeOptions.fromJSON(object.options) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.start !== undefined && (obj.start = Math.round(message.start)); + message.end !== undefined && (obj.end = Math.round(message.end)); + message.options !== undefined && + (obj.options = message.options ? exports.ExtensionRangeOptions.toJSON(message.options) : undefined); + return obj; + }, +}; +function createBaseDescriptorProto_ReservedRange() { + return { start: 0, end: 0 }; +} +exports.DescriptorProto_ReservedRange = { + fromJSON(object) { + return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? Number(object.end) : 0 }; + }, + toJSON(message) { + const obj = {}; + message.start !== undefined && (obj.start = Math.round(message.start)); + message.end !== undefined && (obj.end = Math.round(message.end)); + return obj; + }, +}; +function createBaseExtensionRangeOptions() { + return { uninterpretedOption: [] }; +} +exports.ExtensionRangeOptions = { + fromJSON(object) { + return { + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? 
exports.UninterpretedOption.toJSON(e) : undefined); + } + else { + obj.uninterpretedOption = []; + } + return obj; + }, +}; +function createBaseFieldDescriptorProto() { + return { + name: "", + number: 0, + label: 1, + type: 1, + typeName: "", + extendee: "", + defaultValue: "", + oneofIndex: 0, + jsonName: "", + options: undefined, + proto3Optional: false, + }; +} +exports.FieldDescriptorProto = { + fromJSON(object) { + return { + name: isSet(object.name) ? String(object.name) : "", + number: isSet(object.number) ? Number(object.number) : 0, + label: isSet(object.label) ? fieldDescriptorProto_LabelFromJSON(object.label) : 1, + type: isSet(object.type) ? fieldDescriptorProto_TypeFromJSON(object.type) : 1, + typeName: isSet(object.typeName) ? String(object.typeName) : "", + extendee: isSet(object.extendee) ? String(object.extendee) : "", + defaultValue: isSet(object.defaultValue) ? String(object.defaultValue) : "", + oneofIndex: isSet(object.oneofIndex) ? Number(object.oneofIndex) : 0, + jsonName: isSet(object.jsonName) ? String(object.jsonName) : "", + options: isSet(object.options) ? exports.FieldOptions.fromJSON(object.options) : undefined, + proto3Optional: isSet(object.proto3Optional) ? Boolean(object.proto3Optional) : false, + }; + }, + toJSON(message) { + const obj = {}; + message.name !== undefined && (obj.name = message.name); + message.number !== undefined && (obj.number = Math.round(message.number)); + message.label !== undefined && (obj.label = fieldDescriptorProto_LabelToJSON(message.label)); + message.type !== undefined && (obj.type = fieldDescriptorProto_TypeToJSON(message.type)); + message.typeName !== undefined && (obj.typeName = message.typeName); + message.extendee !== undefined && (obj.extendee = message.extendee); + message.defaultValue !== undefined && (obj.defaultValue = message.defaultValue); + message.oneofIndex !== undefined && (obj.oneofIndex = Math.round(message.oneofIndex)); + message.jsonName !== undefined && (obj.jsonName = message.jsonName); + message.options !== undefined && (obj.options = message.options ? exports.FieldOptions.toJSON(message.options) : undefined); + message.proto3Optional !== undefined && (obj.proto3Optional = message.proto3Optional); + return obj; + }, +}; +function createBaseOneofDescriptorProto() { + return { name: "", options: undefined }; +} +exports.OneofDescriptorProto = { + fromJSON(object) { + return { + name: isSet(object.name) ? String(object.name) : "", + options: isSet(object.options) ? exports.OneofOptions.fromJSON(object.options) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.name !== undefined && (obj.name = message.name); + message.options !== undefined && (obj.options = message.options ? exports.OneofOptions.toJSON(message.options) : undefined); + return obj; + }, +}; +function createBaseEnumDescriptorProto() { + return { name: "", value: [], options: undefined, reservedRange: [], reservedName: [] }; +} +exports.EnumDescriptorProto = { + fromJSON(object) { + return { + name: isSet(object.name) ? String(object.name) : "", + value: Array.isArray(object?.value) ? object.value.map((e) => exports.EnumValueDescriptorProto.fromJSON(e)) : [], + options: isSet(object.options) ? exports.EnumOptions.fromJSON(object.options) : undefined, + reservedRange: Array.isArray(object?.reservedRange) + ? object.reservedRange.map((e) => exports.EnumDescriptorProto_EnumReservedRange.fromJSON(e)) + : [], + reservedName: Array.isArray(object?.reservedName) + ? 
object.reservedName.map((e) => String(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + message.name !== undefined && (obj.name = message.name); + if (message.value) { + obj.value = message.value.map((e) => e ? exports.EnumValueDescriptorProto.toJSON(e) : undefined); + } + else { + obj.value = []; + } + message.options !== undefined && (obj.options = message.options ? exports.EnumOptions.toJSON(message.options) : undefined); + if (message.reservedRange) { + obj.reservedRange = message.reservedRange.map((e) => e ? exports.EnumDescriptorProto_EnumReservedRange.toJSON(e) : undefined); + } + else { + obj.reservedRange = []; + } + if (message.reservedName) { + obj.reservedName = message.reservedName.map((e) => e); + } + else { + obj.reservedName = []; + } + return obj; + }, +}; +function createBaseEnumDescriptorProto_EnumReservedRange() { + return { start: 0, end: 0 }; +} +exports.EnumDescriptorProto_EnumReservedRange = { + fromJSON(object) { + return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? Number(object.end) : 0 }; + }, + toJSON(message) { + const obj = {}; + message.start !== undefined && (obj.start = Math.round(message.start)); + message.end !== undefined && (obj.end = Math.round(message.end)); + return obj; + }, +}; +function createBaseEnumValueDescriptorProto() { + return { name: "", number: 0, options: undefined }; +} +exports.EnumValueDescriptorProto = { + fromJSON(object) { + return { + name: isSet(object.name) ? String(object.name) : "", + number: isSet(object.number) ? Number(object.number) : 0, + options: isSet(object.options) ? exports.EnumValueOptions.fromJSON(object.options) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.name !== undefined && (obj.name = message.name); + message.number !== undefined && (obj.number = Math.round(message.number)); + message.options !== undefined && + (obj.options = message.options ? exports.EnumValueOptions.toJSON(message.options) : undefined); + return obj; + }, +}; +function createBaseServiceDescriptorProto() { + return { name: "", method: [], options: undefined }; +} +exports.ServiceDescriptorProto = { + fromJSON(object) { + return { + name: isSet(object.name) ? String(object.name) : "", + method: Array.isArray(object?.method) ? object.method.map((e) => exports.MethodDescriptorProto.fromJSON(e)) : [], + options: isSet(object.options) ? exports.ServiceOptions.fromJSON(object.options) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.name !== undefined && (obj.name = message.name); + if (message.method) { + obj.method = message.method.map((e) => e ? exports.MethodDescriptorProto.toJSON(e) : undefined); + } + else { + obj.method = []; + } + message.options !== undefined && + (obj.options = message.options ? exports.ServiceOptions.toJSON(message.options) : undefined); + return obj; + }, +}; +function createBaseMethodDescriptorProto() { + return { + name: "", + inputType: "", + outputType: "", + options: undefined, + clientStreaming: false, + serverStreaming: false, + }; +} +exports.MethodDescriptorProto = { + fromJSON(object) { + return { + name: isSet(object.name) ? String(object.name) : "", + inputType: isSet(object.inputType) ? String(object.inputType) : "", + outputType: isSet(object.outputType) ? String(object.outputType) : "", + options: isSet(object.options) ? exports.MethodOptions.fromJSON(object.options) : undefined, + clientStreaming: isSet(object.clientStreaming) ? 
Boolean(object.clientStreaming) : false, + serverStreaming: isSet(object.serverStreaming) ? Boolean(object.serverStreaming) : false, + }; + }, + toJSON(message) { + const obj = {}; + message.name !== undefined && (obj.name = message.name); + message.inputType !== undefined && (obj.inputType = message.inputType); + message.outputType !== undefined && (obj.outputType = message.outputType); + message.options !== undefined && + (obj.options = message.options ? exports.MethodOptions.toJSON(message.options) : undefined); + message.clientStreaming !== undefined && (obj.clientStreaming = message.clientStreaming); + message.serverStreaming !== undefined && (obj.serverStreaming = message.serverStreaming); + return obj; + }, +}; +function createBaseFileOptions() { + return { + javaPackage: "", + javaOuterClassname: "", + javaMultipleFiles: false, + javaGenerateEqualsAndHash: false, + javaStringCheckUtf8: false, + optimizeFor: 1, + goPackage: "", + ccGenericServices: false, + javaGenericServices: false, + pyGenericServices: false, + phpGenericServices: false, + deprecated: false, + ccEnableArenas: false, + objcClassPrefix: "", + csharpNamespace: "", + swiftPrefix: "", + phpClassPrefix: "", + phpNamespace: "", + phpMetadataNamespace: "", + rubyPackage: "", + uninterpretedOption: [], + }; +} +exports.FileOptions = { + fromJSON(object) { + return { + javaPackage: isSet(object.javaPackage) ? String(object.javaPackage) : "", + javaOuterClassname: isSet(object.javaOuterClassname) ? String(object.javaOuterClassname) : "", + javaMultipleFiles: isSet(object.javaMultipleFiles) ? Boolean(object.javaMultipleFiles) : false, + javaGenerateEqualsAndHash: isSet(object.javaGenerateEqualsAndHash) + ? Boolean(object.javaGenerateEqualsAndHash) + : false, + javaStringCheckUtf8: isSet(object.javaStringCheckUtf8) ? Boolean(object.javaStringCheckUtf8) : false, + optimizeFor: isSet(object.optimizeFor) ? fileOptions_OptimizeModeFromJSON(object.optimizeFor) : 1, + goPackage: isSet(object.goPackage) ? String(object.goPackage) : "", + ccGenericServices: isSet(object.ccGenericServices) ? Boolean(object.ccGenericServices) : false, + javaGenericServices: isSet(object.javaGenericServices) ? Boolean(object.javaGenericServices) : false, + pyGenericServices: isSet(object.pyGenericServices) ? Boolean(object.pyGenericServices) : false, + phpGenericServices: isSet(object.phpGenericServices) ? Boolean(object.phpGenericServices) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + ccEnableArenas: isSet(object.ccEnableArenas) ? Boolean(object.ccEnableArenas) : false, + objcClassPrefix: isSet(object.objcClassPrefix) ? String(object.objcClassPrefix) : "", + csharpNamespace: isSet(object.csharpNamespace) ? String(object.csharpNamespace) : "", + swiftPrefix: isSet(object.swiftPrefix) ? String(object.swiftPrefix) : "", + phpClassPrefix: isSet(object.phpClassPrefix) ? String(object.phpClassPrefix) : "", + phpNamespace: isSet(object.phpNamespace) ? String(object.phpNamespace) : "", + phpMetadataNamespace: isSet(object.phpMetadataNamespace) ? String(object.phpMetadataNamespace) : "", + rubyPackage: isSet(object.rubyPackage) ? String(object.rubyPackage) : "", + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + message.javaPackage !== undefined && (obj.javaPackage = message.javaPackage); + message.javaOuterClassname !== undefined && (obj.javaOuterClassname = message.javaOuterClassname); + message.javaMultipleFiles !== undefined && (obj.javaMultipleFiles = message.javaMultipleFiles); + message.javaGenerateEqualsAndHash !== undefined && + (obj.javaGenerateEqualsAndHash = message.javaGenerateEqualsAndHash); + message.javaStringCheckUtf8 !== undefined && (obj.javaStringCheckUtf8 = message.javaStringCheckUtf8); + message.optimizeFor !== undefined && (obj.optimizeFor = fileOptions_OptimizeModeToJSON(message.optimizeFor)); + message.goPackage !== undefined && (obj.goPackage = message.goPackage); + message.ccGenericServices !== undefined && (obj.ccGenericServices = message.ccGenericServices); + message.javaGenericServices !== undefined && (obj.javaGenericServices = message.javaGenericServices); + message.pyGenericServices !== undefined && (obj.pyGenericServices = message.pyGenericServices); + message.phpGenericServices !== undefined && (obj.phpGenericServices = message.phpGenericServices); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.ccEnableArenas !== undefined && (obj.ccEnableArenas = message.ccEnableArenas); + message.objcClassPrefix !== undefined && (obj.objcClassPrefix = message.objcClassPrefix); + message.csharpNamespace !== undefined && (obj.csharpNamespace = message.csharpNamespace); + message.swiftPrefix !== undefined && (obj.swiftPrefix = message.swiftPrefix); + message.phpClassPrefix !== undefined && (obj.phpClassPrefix = message.phpClassPrefix); + message.phpNamespace !== undefined && (obj.phpNamespace = message.phpNamespace); + message.phpMetadataNamespace !== undefined && (obj.phpMetadataNamespace = message.phpMetadataNamespace); + message.rubyPackage !== undefined && (obj.rubyPackage = message.rubyPackage); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined); + } + else { + obj.uninterpretedOption = []; + } + return obj; + }, +}; +function createBaseMessageOptions() { + return { + messageSetWireFormat: false, + noStandardDescriptorAccessor: false, + deprecated: false, + mapEntry: false, + uninterpretedOption: [], + }; +} +exports.MessageOptions = { + fromJSON(object) { + return { + messageSetWireFormat: isSet(object.messageSetWireFormat) ? Boolean(object.messageSetWireFormat) : false, + noStandardDescriptorAccessor: isSet(object.noStandardDescriptorAccessor) + ? Boolean(object.noStandardDescriptorAccessor) + : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + mapEntry: isSet(object.mapEntry) ? Boolean(object.mapEntry) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + message.messageSetWireFormat !== undefined && (obj.messageSetWireFormat = message.messageSetWireFormat); + message.noStandardDescriptorAccessor !== undefined && + (obj.noStandardDescriptorAccessor = message.noStandardDescriptorAccessor); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.mapEntry !== undefined && (obj.mapEntry = message.mapEntry); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined); + } + else { + obj.uninterpretedOption = []; + } + return obj; + }, +}; +function createBaseFieldOptions() { + return { + ctype: 0, + packed: false, + jstype: 0, + lazy: false, + unverifiedLazy: false, + deprecated: false, + weak: false, + uninterpretedOption: [], + }; +} +exports.FieldOptions = { + fromJSON(object) { + return { + ctype: isSet(object.ctype) ? fieldOptions_CTypeFromJSON(object.ctype) : 0, + packed: isSet(object.packed) ? Boolean(object.packed) : false, + jstype: isSet(object.jstype) ? fieldOptions_JSTypeFromJSON(object.jstype) : 0, + lazy: isSet(object.lazy) ? Boolean(object.lazy) : false, + unverifiedLazy: isSet(object.unverifiedLazy) ? Boolean(object.unverifiedLazy) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + weak: isSet(object.weak) ? Boolean(object.weak) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + message.ctype !== undefined && (obj.ctype = fieldOptions_CTypeToJSON(message.ctype)); + message.packed !== undefined && (obj.packed = message.packed); + message.jstype !== undefined && (obj.jstype = fieldOptions_JSTypeToJSON(message.jstype)); + message.lazy !== undefined && (obj.lazy = message.lazy); + message.unverifiedLazy !== undefined && (obj.unverifiedLazy = message.unverifiedLazy); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.weak !== undefined && (obj.weak = message.weak); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined); + } + else { + obj.uninterpretedOption = []; + } + return obj; + }, +}; +function createBaseOneofOptions() { + return { uninterpretedOption: [] }; +} +exports.OneofOptions = { + fromJSON(object) { + return { + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined); + } + else { + obj.uninterpretedOption = []; + } + return obj; + }, +}; +function createBaseEnumOptions() { + return { allowAlias: false, deprecated: false, uninterpretedOption: [] }; +} +exports.EnumOptions = { + fromJSON(object) { + return { + allowAlias: isSet(object.allowAlias) ? Boolean(object.allowAlias) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + message.allowAlias !== undefined && (obj.allowAlias = message.allowAlias); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined); + } + else { + obj.uninterpretedOption = []; + } + return obj; + }, +}; +function createBaseEnumValueOptions() { + return { deprecated: false, uninterpretedOption: [] }; +} +exports.EnumValueOptions = { + fromJSON(object) { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined); + } + else { + obj.uninterpretedOption = []; + } + return obj; + }, +}; +function createBaseServiceOptions() { + return { deprecated: false, uninterpretedOption: [] }; +} +exports.ServiceOptions = { + fromJSON(object) { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined); + } + else { + obj.uninterpretedOption = []; + } + return obj; + }, +}; +function createBaseMethodOptions() { + return { deprecated: false, idempotencyLevel: 0, uninterpretedOption: [] }; +} +exports.MethodOptions = { + fromJSON(object) { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + idempotencyLevel: isSet(object.idempotencyLevel) + ? methodOptions_IdempotencyLevelFromJSON(object.idempotencyLevel) + : 0, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.idempotencyLevel !== undefined && + (obj.idempotencyLevel = methodOptions_IdempotencyLevelToJSON(message.idempotencyLevel)); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined); + } + else { + obj.uninterpretedOption = []; + } + return obj; + }, +}; +function createBaseUninterpretedOption() { + return { + name: [], + identifierValue: "", + positiveIntValue: "0", + negativeIntValue: "0", + doubleValue: 0, + stringValue: Buffer.alloc(0), + aggregateValue: "", + }; +} +exports.UninterpretedOption = { + fromJSON(object) { + return { + name: Array.isArray(object?.name) ? object.name.map((e) => exports.UninterpretedOption_NamePart.fromJSON(e)) : [], + identifierValue: isSet(object.identifierValue) ? 
String(object.identifierValue) : "", + positiveIntValue: isSet(object.positiveIntValue) ? String(object.positiveIntValue) : "0", + negativeIntValue: isSet(object.negativeIntValue) ? String(object.negativeIntValue) : "0", + doubleValue: isSet(object.doubleValue) ? Number(object.doubleValue) : 0, + stringValue: isSet(object.stringValue) ? Buffer.from(bytesFromBase64(object.stringValue)) : Buffer.alloc(0), + aggregateValue: isSet(object.aggregateValue) ? String(object.aggregateValue) : "", + }; + }, + toJSON(message) { + const obj = {}; + if (message.name) { + obj.name = message.name.map((e) => e ? exports.UninterpretedOption_NamePart.toJSON(e) : undefined); + } + else { + obj.name = []; + } + message.identifierValue !== undefined && (obj.identifierValue = message.identifierValue); + message.positiveIntValue !== undefined && (obj.positiveIntValue = message.positiveIntValue); + message.negativeIntValue !== undefined && (obj.negativeIntValue = message.negativeIntValue); + message.doubleValue !== undefined && (obj.doubleValue = message.doubleValue); + message.stringValue !== undefined && + (obj.stringValue = base64FromBytes(message.stringValue !== undefined ? message.stringValue : Buffer.alloc(0))); + message.aggregateValue !== undefined && (obj.aggregateValue = message.aggregateValue); + return obj; + }, +}; +function createBaseUninterpretedOption_NamePart() { + return { namePart: "", isExtension: false }; +} +exports.UninterpretedOption_NamePart = { + fromJSON(object) { + return { + namePart: isSet(object.namePart) ? String(object.namePart) : "", + isExtension: isSet(object.isExtension) ? Boolean(object.isExtension) : false, + }; + }, + toJSON(message) { + const obj = {}; + message.namePart !== undefined && (obj.namePart = message.namePart); + message.isExtension !== undefined && (obj.isExtension = message.isExtension); + return obj; + }, +}; +function createBaseSourceCodeInfo() { + return { location: [] }; +} +exports.SourceCodeInfo = { + fromJSON(object) { + return { + location: Array.isArray(object?.location) + ? object.location.map((e) => exports.SourceCodeInfo_Location.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.location) { + obj.location = message.location.map((e) => e ? exports.SourceCodeInfo_Location.toJSON(e) : undefined); + } + else { + obj.location = []; + } + return obj; + }, +}; +function createBaseSourceCodeInfo_Location() { + return { path: [], span: [], leadingComments: "", trailingComments: "", leadingDetachedComments: [] }; +} +exports.SourceCodeInfo_Location = { + fromJSON(object) { + return { + path: Array.isArray(object?.path) ? object.path.map((e) => Number(e)) : [], + span: Array.isArray(object?.span) ? object.span.map((e) => Number(e)) : [], + leadingComments: isSet(object.leadingComments) ? String(object.leadingComments) : "", + trailingComments: isSet(object.trailingComments) ? String(object.trailingComments) : "", + leadingDetachedComments: Array.isArray(object?.leadingDetachedComments) + ? 
object.leadingDetachedComments.map((e) => String(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.path) { + obj.path = message.path.map((e) => Math.round(e)); + } + else { + obj.path = []; + } + if (message.span) { + obj.span = message.span.map((e) => Math.round(e)); + } + else { + obj.span = []; + } + message.leadingComments !== undefined && (obj.leadingComments = message.leadingComments); + message.trailingComments !== undefined && (obj.trailingComments = message.trailingComments); + if (message.leadingDetachedComments) { + obj.leadingDetachedComments = message.leadingDetachedComments.map((e) => e); + } + else { + obj.leadingDetachedComments = []; + } + return obj; + }, +}; +function createBaseGeneratedCodeInfo() { + return { annotation: [] }; +} +exports.GeneratedCodeInfo = { + fromJSON(object) { + return { + annotation: Array.isArray(object?.annotation) + ? object.annotation.map((e) => exports.GeneratedCodeInfo_Annotation.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.annotation) { + obj.annotation = message.annotation.map((e) => e ? exports.GeneratedCodeInfo_Annotation.toJSON(e) : undefined); + } + else { + obj.annotation = []; + } + return obj; + }, +}; +function createBaseGeneratedCodeInfo_Annotation() { + return { path: [], sourceFile: "", begin: 0, end: 0 }; +} +exports.GeneratedCodeInfo_Annotation = { + fromJSON(object) { + return { + path: Array.isArray(object?.path) ? object.path.map((e) => Number(e)) : [], + sourceFile: isSet(object.sourceFile) ? String(object.sourceFile) : "", + begin: isSet(object.begin) ? Number(object.begin) : 0, + end: isSet(object.end) ? Number(object.end) : 0, + }; + }, + toJSON(message) { + const obj = {}; + if (message.path) { + obj.path = message.path.map((e) => Math.round(e)); + } + else { + obj.path = []; + } + message.sourceFile !== undefined && (obj.sourceFile = message.sourceFile); + message.begin !== undefined && (obj.begin = Math.round(message.begin)); + message.end !== undefined && (obj.end = Math.round(message.end)); + return obj; + }, +}; +var tsProtoGlobalThis = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); +function bytesFromBase64(b64) { + if (tsProtoGlobalThis.Buffer) { + return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64")); + } + else { + const bin = tsProtoGlobalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} +function base64FromBytes(arr) { + if (tsProtoGlobalThis.Buffer) { + return tsProtoGlobalThis.Buffer.from(arr).toString("base64"); + } + else { + const bin = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return tsProtoGlobalThis.btoa(bin.join("")); + } +} +function isSet(value) { + return value !== null && value !== undefined; +} diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js new file mode 100644 index 0000000000000..159135fe87172 --- /dev/null +++ b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js @@ -0,0 
+1,24 @@ +"use strict"; +/* eslint-disable */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Timestamp = void 0; +function createBaseTimestamp() { + return { seconds: "0", nanos: 0 }; +} +exports.Timestamp = { + fromJSON(object) { + return { + seconds: isSet(object.seconds) ? String(object.seconds) : "0", + nanos: isSet(object.nanos) ? Number(object.nanos) : 0, + }; + }, + toJSON(message) { + const obj = {}; + message.seconds !== undefined && (obj.seconds = message.seconds); + message.nanos !== undefined && (obj.nanos = Math.round(message.nanos)); + return obj; + }, +}; +function isSet(value) { + return value !== null && value !== undefined; +} diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js new file mode 100644 index 0000000000000..1ef3e1b3356b7 --- /dev/null +++ b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js @@ -0,0 +1,106 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Bundle = exports.VerificationMaterial = exports.TimestampVerificationData = void 0; +/* eslint-disable */ +const envelope_1 = require("./envelope"); +const sigstore_common_1 = require("./sigstore_common"); +const sigstore_rekor_1 = require("./sigstore_rekor"); +function createBaseTimestampVerificationData() { + return { rfc3161Timestamps: [] }; +} +exports.TimestampVerificationData = { + fromJSON(object) { + return { + rfc3161Timestamps: Array.isArray(object?.rfc3161Timestamps) + ? object.rfc3161Timestamps.map((e) => sigstore_common_1.RFC3161SignedTimestamp.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.rfc3161Timestamps) { + obj.rfc3161Timestamps = message.rfc3161Timestamps.map((e) => e ? sigstore_common_1.RFC3161SignedTimestamp.toJSON(e) : undefined); + } + else { + obj.rfc3161Timestamps = []; + } + return obj; + }, +}; +function createBaseVerificationMaterial() { + return { content: undefined, tlogEntries: [], timestampVerificationData: undefined }; +} +exports.VerificationMaterial = { + fromJSON(object) { + return { + content: isSet(object.publicKey) + ? { $case: "publicKey", publicKey: sigstore_common_1.PublicKeyIdentifier.fromJSON(object.publicKey) } + : isSet(object.x509CertificateChain) + ? { + $case: "x509CertificateChain", + x509CertificateChain: sigstore_common_1.X509CertificateChain.fromJSON(object.x509CertificateChain), + } + : undefined, + tlogEntries: Array.isArray(object?.tlogEntries) + ? object.tlogEntries.map((e) => sigstore_rekor_1.TransparencyLogEntry.fromJSON(e)) + : [], + timestampVerificationData: isSet(object.timestampVerificationData) + ? exports.TimestampVerificationData.fromJSON(object.timestampVerificationData) + : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.content?.$case === "publicKey" && + (obj.publicKey = message.content?.publicKey ? sigstore_common_1.PublicKeyIdentifier.toJSON(message.content?.publicKey) : undefined); + message.content?.$case === "x509CertificateChain" && + (obj.x509CertificateChain = message.content?.x509CertificateChain + ? sigstore_common_1.X509CertificateChain.toJSON(message.content?.x509CertificateChain) + : undefined); + if (message.tlogEntries) { + obj.tlogEntries = message.tlogEntries.map((e) => e ? 
sigstore_rekor_1.TransparencyLogEntry.toJSON(e) : undefined);
+        }
+        else {
+            obj.tlogEntries = [];
+        }
+        message.timestampVerificationData !== undefined &&
+            (obj.timestampVerificationData = message.timestampVerificationData
+                ? exports.TimestampVerificationData.toJSON(message.timestampVerificationData)
+                : undefined);
+        return obj;
+    },
+};
+function createBaseBundle() {
+    return { mediaType: "", verificationMaterial: undefined, content: undefined };
+}
+exports.Bundle = {
+    fromJSON(object) {
+        return {
+            mediaType: isSet(object.mediaType) ? String(object.mediaType) : "",
+            verificationMaterial: isSet(object.verificationMaterial)
+                ? exports.VerificationMaterial.fromJSON(object.verificationMaterial)
+                : undefined,
+            content: isSet(object.messageSignature)
+                ? { $case: "messageSignature", messageSignature: sigstore_common_1.MessageSignature.fromJSON(object.messageSignature) }
+                : isSet(object.dsseEnvelope)
+                    ? { $case: "dsseEnvelope", dsseEnvelope: envelope_1.Envelope.fromJSON(object.dsseEnvelope) }
+                    : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.mediaType !== undefined && (obj.mediaType = message.mediaType);
+        message.verificationMaterial !== undefined && (obj.verificationMaterial = message.verificationMaterial
+            ? exports.VerificationMaterial.toJSON(message.verificationMaterial)
+            : undefined);
+        message.content?.$case === "messageSignature" && (obj.messageSignature = message.content?.messageSignature
+            ? sigstore_common_1.MessageSignature.toJSON(message.content?.messageSignature)
+            : undefined);
+        message.content?.$case === "dsseEnvelope" &&
+            (obj.dsseEnvelope = message.content?.dsseEnvelope ? envelope_1.Envelope.toJSON(message.content?.dsseEnvelope) : undefined);
+        return obj;
+    },
+};
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
new file mode 100644
index 0000000000000..bcd654e9154b9
--- /dev/null
+++ b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
@@ -0,0 +1,457 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TimeRange = exports.X509CertificateChain = exports.SubjectAlternativeName = exports.X509Certificate = exports.DistinguishedName = exports.ObjectIdentifierValuePair = exports.ObjectIdentifier = exports.PublicKeyIdentifier = exports.PublicKey = exports.RFC3161SignedTimestamp = exports.LogId = exports.MessageSignature = exports.HashOutput = exports.subjectAlternativeNameTypeToJSON = exports.subjectAlternativeNameTypeFromJSON = exports.SubjectAlternativeNameType = exports.publicKeyDetailsToJSON = exports.publicKeyDetailsFromJSON = exports.PublicKeyDetails = exports.hashAlgorithmToJSON = exports.hashAlgorithmFromJSON = exports.HashAlgorithm = void 0;
+/* eslint-disable */
+const timestamp_1 = require("./google/protobuf/timestamp");
+/**
+ * Only a subset of the secure hash standard algorithms are supported.
+ * See for more
+ * details.
+ * UNSPECIFIED SHOULD not be used, primary reason for inclusion is to force
+ * any proto JSON serialization to emit the used hash algorithm, as default
+ * option is to *omit* the default value of an enum (which is the first
+ * value, represented by '0').
+ */ +var HashAlgorithm; +(function (HashAlgorithm) { + HashAlgorithm[HashAlgorithm["HASH_ALGORITHM_UNSPECIFIED"] = 0] = "HASH_ALGORITHM_UNSPECIFIED"; + HashAlgorithm[HashAlgorithm["SHA2_256"] = 1] = "SHA2_256"; +})(HashAlgorithm = exports.HashAlgorithm || (exports.HashAlgorithm = {})); +function hashAlgorithmFromJSON(object) { + switch (object) { + case 0: + case "HASH_ALGORITHM_UNSPECIFIED": + return HashAlgorithm.HASH_ALGORITHM_UNSPECIFIED; + case 1: + case "SHA2_256": + return HashAlgorithm.SHA2_256; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm"); + } +} +exports.hashAlgorithmFromJSON = hashAlgorithmFromJSON; +function hashAlgorithmToJSON(object) { + switch (object) { + case HashAlgorithm.HASH_ALGORITHM_UNSPECIFIED: + return "HASH_ALGORITHM_UNSPECIFIED"; + case HashAlgorithm.SHA2_256: + return "SHA2_256"; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm"); + } +} +exports.hashAlgorithmToJSON = hashAlgorithmToJSON; +/** + * Details of a specific public key, capturing the key encoding method + * and signature algorithm. + * To avoid the possibility of contradicting formats such as PKCS1 with + * ED25519, the valid permutations are listed as a linear set instead of a + * cartesian set (i.e. one combined variable instead of two, one for encoding + * and one for the signature algorithm). + */ +var PublicKeyDetails; +(function (PublicKeyDetails) { + PublicKeyDetails[PublicKeyDetails["PUBLIC_KEY_DETAILS_UNSPECIFIED"] = 0] = "PUBLIC_KEY_DETAILS_UNSPECIFIED"; + /** PKCS1_RSA_PKCS1V5 - RSA */ + PublicKeyDetails[PublicKeyDetails["PKCS1_RSA_PKCS1V5"] = 1] = "PKCS1_RSA_PKCS1V5"; + /** PKCS1_RSA_PSS - See RFC8017 */ + PublicKeyDetails[PublicKeyDetails["PKCS1_RSA_PSS"] = 2] = "PKCS1_RSA_PSS"; + PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PKCS1V5"] = 3] = "PKIX_RSA_PKCS1V5"; + PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PSS"] = 4] = "PKIX_RSA_PSS"; + /** PKIX_ECDSA_P256_SHA_256 - ECDSA */ + PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P256_SHA_256"] = 5] = "PKIX_ECDSA_P256_SHA_256"; + /** PKIX_ECDSA_P256_HMAC_SHA_256 - See RFC6979 */ + PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P256_HMAC_SHA_256"] = 6] = "PKIX_ECDSA_P256_HMAC_SHA_256"; + /** PKIX_ED25519 - Ed25519 */ + PublicKeyDetails[PublicKeyDetails["PKIX_ED25519"] = 7] = "PKIX_ED25519"; +})(PublicKeyDetails = exports.PublicKeyDetails || (exports.PublicKeyDetails = {})); +function publicKeyDetailsFromJSON(object) { + switch (object) { + case 0: + case "PUBLIC_KEY_DETAILS_UNSPECIFIED": + return PublicKeyDetails.PUBLIC_KEY_DETAILS_UNSPECIFIED; + case 1: + case "PKCS1_RSA_PKCS1V5": + return PublicKeyDetails.PKCS1_RSA_PKCS1V5; + case 2: + case "PKCS1_RSA_PSS": + return PublicKeyDetails.PKCS1_RSA_PSS; + case 3: + case "PKIX_RSA_PKCS1V5": + return PublicKeyDetails.PKIX_RSA_PKCS1V5; + case 4: + case "PKIX_RSA_PSS": + return PublicKeyDetails.PKIX_RSA_PSS; + case 5: + case "PKIX_ECDSA_P256_SHA_256": + return PublicKeyDetails.PKIX_ECDSA_P256_SHA_256; + case 6: + case "PKIX_ECDSA_P256_HMAC_SHA_256": + return PublicKeyDetails.PKIX_ECDSA_P256_HMAC_SHA_256; + case 7: + case "PKIX_ED25519": + return PublicKeyDetails.PKIX_ED25519; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails"); + } +} +exports.publicKeyDetailsFromJSON = publicKeyDetailsFromJSON; +function publicKeyDetailsToJSON(object) { + switch (object) { + case PublicKeyDetails.PUBLIC_KEY_DETAILS_UNSPECIFIED: 
return "PUBLIC_KEY_DETAILS_UNSPECIFIED"; + case PublicKeyDetails.PKCS1_RSA_PKCS1V5: + return "PKCS1_RSA_PKCS1V5"; + case PublicKeyDetails.PKCS1_RSA_PSS: + return "PKCS1_RSA_PSS"; + case PublicKeyDetails.PKIX_RSA_PKCS1V5: + return "PKIX_RSA_PKCS1V5"; + case PublicKeyDetails.PKIX_RSA_PSS: + return "PKIX_RSA_PSS"; + case PublicKeyDetails.PKIX_ECDSA_P256_SHA_256: + return "PKIX_ECDSA_P256_SHA_256"; + case PublicKeyDetails.PKIX_ECDSA_P256_HMAC_SHA_256: + return "PKIX_ECDSA_P256_HMAC_SHA_256"; + case PublicKeyDetails.PKIX_ED25519: + return "PKIX_ED25519"; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails"); + } +} +exports.publicKeyDetailsToJSON = publicKeyDetailsToJSON; +var SubjectAlternativeNameType; +(function (SubjectAlternativeNameType) { + SubjectAlternativeNameType[SubjectAlternativeNameType["SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED"] = 0] = "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED"; + SubjectAlternativeNameType[SubjectAlternativeNameType["EMAIL"] = 1] = "EMAIL"; + SubjectAlternativeNameType[SubjectAlternativeNameType["URI"] = 2] = "URI"; + /** + * OTHER_NAME - OID 1.3.6.1.4.1.57264.1.7 + * See https://github.com/sigstore/fulcio/blob/main/docs/oid-info.md#1361415726417--othername-san + * for more details. + */ + SubjectAlternativeNameType[SubjectAlternativeNameType["OTHER_NAME"] = 3] = "OTHER_NAME"; +})(SubjectAlternativeNameType = exports.SubjectAlternativeNameType || (exports.SubjectAlternativeNameType = {})); +function subjectAlternativeNameTypeFromJSON(object) { + switch (object) { + case 0: + case "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED": + return SubjectAlternativeNameType.SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED; + case 1: + case "EMAIL": + return SubjectAlternativeNameType.EMAIL; + case 2: + case "URI": + return SubjectAlternativeNameType.URI; + case 3: + case "OTHER_NAME": + return SubjectAlternativeNameType.OTHER_NAME; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType"); + } +} +exports.subjectAlternativeNameTypeFromJSON = subjectAlternativeNameTypeFromJSON; +function subjectAlternativeNameTypeToJSON(object) { + switch (object) { + case SubjectAlternativeNameType.SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED: + return "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED"; + case SubjectAlternativeNameType.EMAIL: + return "EMAIL"; + case SubjectAlternativeNameType.URI: + return "URI"; + case SubjectAlternativeNameType.OTHER_NAME: + return "OTHER_NAME"; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType"); + } +} +exports.subjectAlternativeNameTypeToJSON = subjectAlternativeNameTypeToJSON; +function createBaseHashOutput() { + return { algorithm: 0, digest: Buffer.alloc(0) }; +} +exports.HashOutput = { + fromJSON(object) { + return { + algorithm: isSet(object.algorithm) ? hashAlgorithmFromJSON(object.algorithm) : 0, + digest: isSet(object.digest) ? Buffer.from(bytesFromBase64(object.digest)) : Buffer.alloc(0), + }; + }, + toJSON(message) { + const obj = {}; + message.algorithm !== undefined && (obj.algorithm = hashAlgorithmToJSON(message.algorithm)); + message.digest !== undefined && + (obj.digest = base64FromBytes(message.digest !== undefined ? 
message.digest : Buffer.alloc(0))); + return obj; + }, +}; +function createBaseMessageSignature() { + return { messageDigest: undefined, signature: Buffer.alloc(0) }; +} +exports.MessageSignature = { + fromJSON(object) { + return { + messageDigest: isSet(object.messageDigest) ? exports.HashOutput.fromJSON(object.messageDigest) : undefined, + signature: isSet(object.signature) ? Buffer.from(bytesFromBase64(object.signature)) : Buffer.alloc(0), + }; + }, + toJSON(message) { + const obj = {}; + message.messageDigest !== undefined && + (obj.messageDigest = message.messageDigest ? exports.HashOutput.toJSON(message.messageDigest) : undefined); + message.signature !== undefined && + (obj.signature = base64FromBytes(message.signature !== undefined ? message.signature : Buffer.alloc(0))); + return obj; + }, +}; +function createBaseLogId() { + return { keyId: Buffer.alloc(0) }; +} +exports.LogId = { + fromJSON(object) { + return { keyId: isSet(object.keyId) ? Buffer.from(bytesFromBase64(object.keyId)) : Buffer.alloc(0) }; + }, + toJSON(message) { + const obj = {}; + message.keyId !== undefined && + (obj.keyId = base64FromBytes(message.keyId !== undefined ? message.keyId : Buffer.alloc(0))); + return obj; + }, +}; +function createBaseRFC3161SignedTimestamp() { + return { signedTimestamp: Buffer.alloc(0) }; +} +exports.RFC3161SignedTimestamp = { + fromJSON(object) { + return { + signedTimestamp: isSet(object.signedTimestamp) + ? Buffer.from(bytesFromBase64(object.signedTimestamp)) + : Buffer.alloc(0), + }; + }, + toJSON(message) { + const obj = {}; + message.signedTimestamp !== undefined && + (obj.signedTimestamp = base64FromBytes(message.signedTimestamp !== undefined ? message.signedTimestamp : Buffer.alloc(0))); + return obj; + }, +}; +function createBasePublicKey() { + return { rawBytes: undefined, keyDetails: 0, validFor: undefined }; +} +exports.PublicKey = { + fromJSON(object) { + return { + rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : undefined, + keyDetails: isSet(object.keyDetails) ? publicKeyDetailsFromJSON(object.keyDetails) : 0, + validFor: isSet(object.validFor) ? exports.TimeRange.fromJSON(object.validFor) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.rawBytes !== undefined && + (obj.rawBytes = message.rawBytes !== undefined ? base64FromBytes(message.rawBytes) : undefined); + message.keyDetails !== undefined && (obj.keyDetails = publicKeyDetailsToJSON(message.keyDetails)); + message.validFor !== undefined && + (obj.validFor = message.validFor ? exports.TimeRange.toJSON(message.validFor) : undefined); + return obj; + }, +}; +function createBasePublicKeyIdentifier() { + return { hint: "" }; +} +exports.PublicKeyIdentifier = { + fromJSON(object) { + return { hint: isSet(object.hint) ? String(object.hint) : "" }; + }, + toJSON(message) { + const obj = {}; + message.hint !== undefined && (obj.hint = message.hint); + return obj; + }, +}; +function createBaseObjectIdentifier() { + return { id: [] }; +} +exports.ObjectIdentifier = { + fromJSON(object) { + return { id: Array.isArray(object?.id) ? object.id.map((e) => Number(e)) : [] }; + }, + toJSON(message) { + const obj = {}; + if (message.id) { + obj.id = message.id.map((e) => Math.round(e)); + } + else { + obj.id = []; + } + return obj; + }, +}; +function createBaseObjectIdentifierValuePair() { + return { oid: undefined, value: Buffer.alloc(0) }; +} +exports.ObjectIdentifierValuePair = { + fromJSON(object) { + return { + oid: isSet(object.oid) ? 
exports.ObjectIdentifier.fromJSON(object.oid) : undefined, + value: isSet(object.value) ? Buffer.from(bytesFromBase64(object.value)) : Buffer.alloc(0), + }; + }, + toJSON(message) { + const obj = {}; + message.oid !== undefined && (obj.oid = message.oid ? exports.ObjectIdentifier.toJSON(message.oid) : undefined); + message.value !== undefined && + (obj.value = base64FromBytes(message.value !== undefined ? message.value : Buffer.alloc(0))); + return obj; + }, +}; +function createBaseDistinguishedName() { + return { organization: "", commonName: "" }; +} +exports.DistinguishedName = { + fromJSON(object) { + return { + organization: isSet(object.organization) ? String(object.organization) : "", + commonName: isSet(object.commonName) ? String(object.commonName) : "", + }; + }, + toJSON(message) { + const obj = {}; + message.organization !== undefined && (obj.organization = message.organization); + message.commonName !== undefined && (obj.commonName = message.commonName); + return obj; + }, +}; +function createBaseX509Certificate() { + return { rawBytes: Buffer.alloc(0) }; +} +exports.X509Certificate = { + fromJSON(object) { + return { rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : Buffer.alloc(0) }; + }, + toJSON(message) { + const obj = {}; + message.rawBytes !== undefined && + (obj.rawBytes = base64FromBytes(message.rawBytes !== undefined ? message.rawBytes : Buffer.alloc(0))); + return obj; + }, +}; +function createBaseSubjectAlternativeName() { + return { type: 0, identity: undefined }; +} +exports.SubjectAlternativeName = { + fromJSON(object) { + return { + type: isSet(object.type) ? subjectAlternativeNameTypeFromJSON(object.type) : 0, + identity: isSet(object.regexp) + ? { $case: "regexp", regexp: String(object.regexp) } + : isSet(object.value) + ? { $case: "value", value: String(object.value) } + : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.type !== undefined && (obj.type = subjectAlternativeNameTypeToJSON(message.type)); + message.identity?.$case === "regexp" && (obj.regexp = message.identity?.regexp); + message.identity?.$case === "value" && (obj.value = message.identity?.value); + return obj; + }, +}; +function createBaseX509CertificateChain() { + return { certificates: [] }; +} +exports.X509CertificateChain = { + fromJSON(object) { + return { + certificates: Array.isArray(object?.certificates) + ? object.certificates.map((e) => exports.X509Certificate.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.certificates) { + obj.certificates = message.certificates.map((e) => e ? exports.X509Certificate.toJSON(e) : undefined); + } + else { + obj.certificates = []; + } + return obj; + }, +}; +function createBaseTimeRange() { + return { start: undefined, end: undefined }; +} +exports.TimeRange = { + fromJSON(object) { + return { + start: isSet(object.start) ? fromJsonTimestamp(object.start) : undefined, + end: isSet(object.end) ? 
fromJsonTimestamp(object.end) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.start !== undefined && (obj.start = message.start.toISOString()); + message.end !== undefined && (obj.end = message.end.toISOString()); + return obj; + }, +}; +var tsProtoGlobalThis = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); +function bytesFromBase64(b64) { + if (tsProtoGlobalThis.Buffer) { + return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64")); + } + else { + const bin = tsProtoGlobalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} +function base64FromBytes(arr) { + if (tsProtoGlobalThis.Buffer) { + return tsProtoGlobalThis.Buffer.from(arr).toString("base64"); + } + else { + const bin = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return tsProtoGlobalThis.btoa(bin.join("")); + } +} +function fromTimestamp(t) { + let millis = Number(t.seconds) * 1000; + millis += t.nanos / 1000000; + return new Date(millis); +} +function fromJsonTimestamp(o) { + if (o instanceof Date) { + return o; + } + else if (typeof o === "string") { + return new Date(o); + } + else { + return fromTimestamp(timestamp_1.Timestamp.fromJSON(o)); + } +} +function isSet(value) { + return value !== null && value !== undefined; +} diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js new file mode 100644 index 0000000000000..398193b2075a7 --- /dev/null +++ b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js @@ -0,0 +1,167 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TransparencyLogEntry = exports.InclusionPromise = exports.InclusionProof = exports.Checkpoint = exports.KindVersion = void 0; +/* eslint-disable */ +const sigstore_common_1 = require("./sigstore_common"); +function createBaseKindVersion() { + return { kind: "", version: "" }; +} +exports.KindVersion = { + fromJSON(object) { + return { + kind: isSet(object.kind) ? String(object.kind) : "", + version: isSet(object.version) ? String(object.version) : "", + }; + }, + toJSON(message) { + const obj = {}; + message.kind !== undefined && (obj.kind = message.kind); + message.version !== undefined && (obj.version = message.version); + return obj; + }, +}; +function createBaseCheckpoint() { + return { envelope: "" }; +} +exports.Checkpoint = { + fromJSON(object) { + return { envelope: isSet(object.envelope) ? String(object.envelope) : "" }; + }, + toJSON(message) { + const obj = {}; + message.envelope !== undefined && (obj.envelope = message.envelope); + return obj; + }, +}; +function createBaseInclusionProof() { + return { logIndex: "0", rootHash: Buffer.alloc(0), treeSize: "0", hashes: [], checkpoint: undefined }; +} +exports.InclusionProof = { + fromJSON(object) { + return { + logIndex: isSet(object.logIndex) ? String(object.logIndex) : "0", + rootHash: isSet(object.rootHash) ? Buffer.from(bytesFromBase64(object.rootHash)) : Buffer.alloc(0), + treeSize: isSet(object.treeSize) ? 
String(object.treeSize) : "0", + hashes: Array.isArray(object?.hashes) ? object.hashes.map((e) => Buffer.from(bytesFromBase64(e))) : [], + checkpoint: isSet(object.checkpoint) ? exports.Checkpoint.fromJSON(object.checkpoint) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.logIndex !== undefined && (obj.logIndex = message.logIndex); + message.rootHash !== undefined && + (obj.rootHash = base64FromBytes(message.rootHash !== undefined ? message.rootHash : Buffer.alloc(0))); + message.treeSize !== undefined && (obj.treeSize = message.treeSize); + if (message.hashes) { + obj.hashes = message.hashes.map((e) => base64FromBytes(e !== undefined ? e : Buffer.alloc(0))); + } + else { + obj.hashes = []; + } + message.checkpoint !== undefined && + (obj.checkpoint = message.checkpoint ? exports.Checkpoint.toJSON(message.checkpoint) : undefined); + return obj; + }, +}; +function createBaseInclusionPromise() { + return { signedEntryTimestamp: Buffer.alloc(0) }; +} +exports.InclusionPromise = { + fromJSON(object) { + return { + signedEntryTimestamp: isSet(object.signedEntryTimestamp) + ? Buffer.from(bytesFromBase64(object.signedEntryTimestamp)) + : Buffer.alloc(0), + }; + }, + toJSON(message) { + const obj = {}; + message.signedEntryTimestamp !== undefined && + (obj.signedEntryTimestamp = base64FromBytes(message.signedEntryTimestamp !== undefined ? message.signedEntryTimestamp : Buffer.alloc(0))); + return obj; + }, +}; +function createBaseTransparencyLogEntry() { + return { + logIndex: "0", + logId: undefined, + kindVersion: undefined, + integratedTime: "0", + inclusionPromise: undefined, + inclusionProof: undefined, + canonicalizedBody: Buffer.alloc(0), + }; +} +exports.TransparencyLogEntry = { + fromJSON(object) { + return { + logIndex: isSet(object.logIndex) ? String(object.logIndex) : "0", + logId: isSet(object.logId) ? sigstore_common_1.LogId.fromJSON(object.logId) : undefined, + kindVersion: isSet(object.kindVersion) ? exports.KindVersion.fromJSON(object.kindVersion) : undefined, + integratedTime: isSet(object.integratedTime) ? String(object.integratedTime) : "0", + inclusionPromise: isSet(object.inclusionPromise) ? exports.InclusionPromise.fromJSON(object.inclusionPromise) : undefined, + inclusionProof: isSet(object.inclusionProof) ? exports.InclusionProof.fromJSON(object.inclusionProof) : undefined, + canonicalizedBody: isSet(object.canonicalizedBody) + ? Buffer.from(bytesFromBase64(object.canonicalizedBody)) + : Buffer.alloc(0), + }; + }, + toJSON(message) { + const obj = {}; + message.logIndex !== undefined && (obj.logIndex = message.logIndex); + message.logId !== undefined && (obj.logId = message.logId ? sigstore_common_1.LogId.toJSON(message.logId) : undefined); + message.kindVersion !== undefined && + (obj.kindVersion = message.kindVersion ? exports.KindVersion.toJSON(message.kindVersion) : undefined); + message.integratedTime !== undefined && (obj.integratedTime = message.integratedTime); + message.inclusionPromise !== undefined && + (obj.inclusionPromise = message.inclusionPromise ? exports.InclusionPromise.toJSON(message.inclusionPromise) : undefined); + message.inclusionProof !== undefined && + (obj.inclusionProof = message.inclusionProof ? exports.InclusionProof.toJSON(message.inclusionProof) : undefined); + message.canonicalizedBody !== undefined && + (obj.canonicalizedBody = base64FromBytes(message.canonicalizedBody !== undefined ? 
message.canonicalizedBody : Buffer.alloc(0))); + return obj; + }, +}; +var tsProtoGlobalThis = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); +function bytesFromBase64(b64) { + if (tsProtoGlobalThis.Buffer) { + return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64")); + } + else { + const bin = tsProtoGlobalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} +function base64FromBytes(arr) { + if (tsProtoGlobalThis.Buffer) { + return tsProtoGlobalThis.Buffer.from(arr).toString("base64"); + } + else { + const bin = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return tsProtoGlobalThis.btoa(bin.join("")); + } +} +function isSet(value) { + return value !== null && value !== undefined; +} diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js new file mode 100644 index 0000000000000..05e566767cdb2 --- /dev/null +++ b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js @@ -0,0 +1,103 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TrustedRoot = exports.CertificateAuthority = exports.TransparencyLogInstance = void 0; +/* eslint-disable */ +const sigstore_common_1 = require("./sigstore_common"); +function createBaseTransparencyLogInstance() { + return { baseUrl: "", hashAlgorithm: 0, publicKey: undefined, logId: undefined }; +} +exports.TransparencyLogInstance = { + fromJSON(object) { + return { + baseUrl: isSet(object.baseUrl) ? String(object.baseUrl) : "", + hashAlgorithm: isSet(object.hashAlgorithm) ? (0, sigstore_common_1.hashAlgorithmFromJSON)(object.hashAlgorithm) : 0, + publicKey: isSet(object.publicKey) ? sigstore_common_1.PublicKey.fromJSON(object.publicKey) : undefined, + logId: isSet(object.logId) ? sigstore_common_1.LogId.fromJSON(object.logId) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.baseUrl !== undefined && (obj.baseUrl = message.baseUrl); + message.hashAlgorithm !== undefined && (obj.hashAlgorithm = (0, sigstore_common_1.hashAlgorithmToJSON)(message.hashAlgorithm)); + message.publicKey !== undefined && + (obj.publicKey = message.publicKey ? sigstore_common_1.PublicKey.toJSON(message.publicKey) : undefined); + message.logId !== undefined && (obj.logId = message.logId ? sigstore_common_1.LogId.toJSON(message.logId) : undefined); + return obj; + }, +}; +function createBaseCertificateAuthority() { + return { subject: undefined, uri: "", certChain: undefined, validFor: undefined }; +} +exports.CertificateAuthority = { + fromJSON(object) { + return { + subject: isSet(object.subject) ? sigstore_common_1.DistinguishedName.fromJSON(object.subject) : undefined, + uri: isSet(object.uri) ? String(object.uri) : "", + certChain: isSet(object.certChain) ? sigstore_common_1.X509CertificateChain.fromJSON(object.certChain) : undefined, + validFor: isSet(object.validFor) ? 
sigstore_common_1.TimeRange.fromJSON(object.validFor) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.subject !== undefined && + (obj.subject = message.subject ? sigstore_common_1.DistinguishedName.toJSON(message.subject) : undefined); + message.uri !== undefined && (obj.uri = message.uri); + message.certChain !== undefined && + (obj.certChain = message.certChain ? sigstore_common_1.X509CertificateChain.toJSON(message.certChain) : undefined); + message.validFor !== undefined && + (obj.validFor = message.validFor ? sigstore_common_1.TimeRange.toJSON(message.validFor) : undefined); + return obj; + }, +}; +function createBaseTrustedRoot() { + return { mediaType: "", tlogs: [], certificateAuthorities: [], ctlogs: [], timestampAuthorities: [] }; +} +exports.TrustedRoot = { + fromJSON(object) { + return { + mediaType: isSet(object.mediaType) ? String(object.mediaType) : "", + tlogs: Array.isArray(object?.tlogs) ? object.tlogs.map((e) => exports.TransparencyLogInstance.fromJSON(e)) : [], + certificateAuthorities: Array.isArray(object?.certificateAuthorities) + ? object.certificateAuthorities.map((e) => exports.CertificateAuthority.fromJSON(e)) + : [], + ctlogs: Array.isArray(object?.ctlogs) + ? object.ctlogs.map((e) => exports.TransparencyLogInstance.fromJSON(e)) + : [], + timestampAuthorities: Array.isArray(object?.timestampAuthorities) + ? object.timestampAuthorities.map((e) => exports.CertificateAuthority.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + message.mediaType !== undefined && (obj.mediaType = message.mediaType); + if (message.tlogs) { + obj.tlogs = message.tlogs.map((e) => e ? exports.TransparencyLogInstance.toJSON(e) : undefined); + } + else { + obj.tlogs = []; + } + if (message.certificateAuthorities) { + obj.certificateAuthorities = message.certificateAuthorities.map((e) => e ? exports.CertificateAuthority.toJSON(e) : undefined); + } + else { + obj.certificateAuthorities = []; + } + if (message.ctlogs) { + obj.ctlogs = message.ctlogs.map((e) => e ? exports.TransparencyLogInstance.toJSON(e) : undefined); + } + else { + obj.ctlogs = []; + } + if (message.timestampAuthorities) { + obj.timestampAuthorities = message.timestampAuthorities.map((e) => e ? 
exports.CertificateAuthority.toJSON(e) : undefined); + } + else { + obj.timestampAuthorities = []; + } + return obj; + }, +}; +function isSet(value) { + return value !== null && value !== undefined; +} diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js new file mode 100644 index 0000000000000..8a72b89761869 --- /dev/null +++ b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js @@ -0,0 +1,273 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Input = exports.Artifact = exports.ArtifactVerificationOptions_TimestampAuthorityOptions = exports.ArtifactVerificationOptions_CtlogOptions = exports.ArtifactVerificationOptions_TlogOptions = exports.ArtifactVerificationOptions = exports.PublicKeyIdentities = exports.CertificateIdentities = exports.CertificateIdentity = void 0; +/* eslint-disable */ +const sigstore_bundle_1 = require("./sigstore_bundle"); +const sigstore_common_1 = require("./sigstore_common"); +const sigstore_trustroot_1 = require("./sigstore_trustroot"); +function createBaseCertificateIdentity() { + return { issuer: "", san: undefined, oids: [] }; +} +exports.CertificateIdentity = { + fromJSON(object) { + return { + issuer: isSet(object.issuer) ? String(object.issuer) : "", + san: isSet(object.san) ? sigstore_common_1.SubjectAlternativeName.fromJSON(object.san) : undefined, + oids: Array.isArray(object?.oids) ? object.oids.map((e) => sigstore_common_1.ObjectIdentifierValuePair.fromJSON(e)) : [], + }; + }, + toJSON(message) { + const obj = {}; + message.issuer !== undefined && (obj.issuer = message.issuer); + message.san !== undefined && (obj.san = message.san ? sigstore_common_1.SubjectAlternativeName.toJSON(message.san) : undefined); + if (message.oids) { + obj.oids = message.oids.map((e) => e ? sigstore_common_1.ObjectIdentifierValuePair.toJSON(e) : undefined); + } + else { + obj.oids = []; + } + return obj; + }, +}; +function createBaseCertificateIdentities() { + return { identities: [] }; +} +exports.CertificateIdentities = { + fromJSON(object) { + return { + identities: Array.isArray(object?.identities) + ? object.identities.map((e) => exports.CertificateIdentity.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.identities) { + obj.identities = message.identities.map((e) => e ? exports.CertificateIdentity.toJSON(e) : undefined); + } + else { + obj.identities = []; + } + return obj; + }, +}; +function createBasePublicKeyIdentities() { + return { publicKeys: [] }; +} +exports.PublicKeyIdentities = { + fromJSON(object) { + return { + publicKeys: Array.isArray(object?.publicKeys) ? object.publicKeys.map((e) => sigstore_common_1.PublicKey.fromJSON(e)) : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.publicKeys) { + obj.publicKeys = message.publicKeys.map((e) => e ? sigstore_common_1.PublicKey.toJSON(e) : undefined); + } + else { + obj.publicKeys = []; + } + return obj; + }, +}; +function createBaseArtifactVerificationOptions() { + return { signers: undefined, tlogOptions: undefined, ctlogOptions: undefined, tsaOptions: undefined }; +} +exports.ArtifactVerificationOptions = { + fromJSON(object) { + return { + signers: isSet(object.certificateIdentities) + ? 
{ + $case: "certificateIdentities", + certificateIdentities: exports.CertificateIdentities.fromJSON(object.certificateIdentities), + } + : isSet(object.publicKeys) + ? { $case: "publicKeys", publicKeys: exports.PublicKeyIdentities.fromJSON(object.publicKeys) } + : undefined, + tlogOptions: isSet(object.tlogOptions) + ? exports.ArtifactVerificationOptions_TlogOptions.fromJSON(object.tlogOptions) + : undefined, + ctlogOptions: isSet(object.ctlogOptions) + ? exports.ArtifactVerificationOptions_CtlogOptions.fromJSON(object.ctlogOptions) + : undefined, + tsaOptions: isSet(object.tsaOptions) + ? exports.ArtifactVerificationOptions_TimestampAuthorityOptions.fromJSON(object.tsaOptions) + : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.signers?.$case === "certificateIdentities" && + (obj.certificateIdentities = message.signers?.certificateIdentities + ? exports.CertificateIdentities.toJSON(message.signers?.certificateIdentities) + : undefined); + message.signers?.$case === "publicKeys" && (obj.publicKeys = message.signers?.publicKeys + ? exports.PublicKeyIdentities.toJSON(message.signers?.publicKeys) + : undefined); + message.tlogOptions !== undefined && (obj.tlogOptions = message.tlogOptions + ? exports.ArtifactVerificationOptions_TlogOptions.toJSON(message.tlogOptions) + : undefined); + message.ctlogOptions !== undefined && (obj.ctlogOptions = message.ctlogOptions + ? exports.ArtifactVerificationOptions_CtlogOptions.toJSON(message.ctlogOptions) + : undefined); + message.tsaOptions !== undefined && (obj.tsaOptions = message.tsaOptions + ? exports.ArtifactVerificationOptions_TimestampAuthorityOptions.toJSON(message.tsaOptions) + : undefined); + return obj; + }, +}; +function createBaseArtifactVerificationOptions_TlogOptions() { + return { threshold: 0, performOnlineVerification: false, disable: false }; +} +exports.ArtifactVerificationOptions_TlogOptions = { + fromJSON(object) { + return { + threshold: isSet(object.threshold) ? Number(object.threshold) : 0, + performOnlineVerification: isSet(object.performOnlineVerification) + ? Boolean(object.performOnlineVerification) + : false, + disable: isSet(object.disable) ? Boolean(object.disable) : false, + }; + }, + toJSON(message) { + const obj = {}; + message.threshold !== undefined && (obj.threshold = Math.round(message.threshold)); + message.performOnlineVerification !== undefined && + (obj.performOnlineVerification = message.performOnlineVerification); + message.disable !== undefined && (obj.disable = message.disable); + return obj; + }, +}; +function createBaseArtifactVerificationOptions_CtlogOptions() { + return { threshold: 0, detachedSct: false, disable: false }; +} +exports.ArtifactVerificationOptions_CtlogOptions = { + fromJSON(object) { + return { + threshold: isSet(object.threshold) ? Number(object.threshold) : 0, + detachedSct: isSet(object.detachedSct) ? Boolean(object.detachedSct) : false, + disable: isSet(object.disable) ? Boolean(object.disable) : false, + }; + }, + toJSON(message) { + const obj = {}; + message.threshold !== undefined && (obj.threshold = Math.round(message.threshold)); + message.detachedSct !== undefined && (obj.detachedSct = message.detachedSct); + message.disable !== undefined && (obj.disable = message.disable); + return obj; + }, +}; +function createBaseArtifactVerificationOptions_TimestampAuthorityOptions() { + return { threshold: 0, disable: false }; +} +exports.ArtifactVerificationOptions_TimestampAuthorityOptions = { + fromJSON(object) { + return { + threshold: isSet(object.threshold) ? 
Number(object.threshold) : 0, + disable: isSet(object.disable) ? Boolean(object.disable) : false, + }; + }, + toJSON(message) { + const obj = {}; + message.threshold !== undefined && (obj.threshold = Math.round(message.threshold)); + message.disable !== undefined && (obj.disable = message.disable); + return obj; + }, +}; +function createBaseArtifact() { + return { data: undefined }; +} +exports.Artifact = { + fromJSON(object) { + return { + data: isSet(object.artifactUri) + ? { $case: "artifactUri", artifactUri: String(object.artifactUri) } + : isSet(object.artifact) + ? { $case: "artifact", artifact: Buffer.from(bytesFromBase64(object.artifact)) } + : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.data?.$case === "artifactUri" && (obj.artifactUri = message.data?.artifactUri); + message.data?.$case === "artifact" && + (obj.artifact = message.data?.artifact !== undefined ? base64FromBytes(message.data?.artifact) : undefined); + return obj; + }, +}; +function createBaseInput() { + return { + artifactTrustRoot: undefined, + artifactVerificationOptions: undefined, + bundle: undefined, + artifact: undefined, + }; +} +exports.Input = { + fromJSON(object) { + return { + artifactTrustRoot: isSet(object.artifactTrustRoot) ? sigstore_trustroot_1.TrustedRoot.fromJSON(object.artifactTrustRoot) : undefined, + artifactVerificationOptions: isSet(object.artifactVerificationOptions) + ? exports.ArtifactVerificationOptions.fromJSON(object.artifactVerificationOptions) + : undefined, + bundle: isSet(object.bundle) ? sigstore_bundle_1.Bundle.fromJSON(object.bundle) : undefined, + artifact: isSet(object.artifact) ? exports.Artifact.fromJSON(object.artifact) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.artifactTrustRoot !== undefined && + (obj.artifactTrustRoot = message.artifactTrustRoot ? sigstore_trustroot_1.TrustedRoot.toJSON(message.artifactTrustRoot) : undefined); + message.artifactVerificationOptions !== undefined && + (obj.artifactVerificationOptions = message.artifactVerificationOptions + ? exports.ArtifactVerificationOptions.toJSON(message.artifactVerificationOptions) + : undefined); + message.bundle !== undefined && (obj.bundle = message.bundle ? sigstore_bundle_1.Bundle.toJSON(message.bundle) : undefined); + message.artifact !== undefined && (obj.artifact = message.artifact ? 
exports.Artifact.toJSON(message.artifact) : undefined); + return obj; + }, +}; +var tsProtoGlobalThis = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); +function bytesFromBase64(b64) { + if (tsProtoGlobalThis.Buffer) { + return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64")); + } + else { + const bin = tsProtoGlobalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} +function base64FromBytes(arr) { + if (tsProtoGlobalThis.Buffer) { + return tsProtoGlobalThis.Buffer.from(arr).toString("base64"); + } + else { + const bin = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return tsProtoGlobalThis.btoa(bin.join("")); + } +} +function isSet(value) { + return value !== null && value !== undefined; +} diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/index.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/index.js new file mode 100644 index 0000000000000..eafb768c48fca --- /dev/null +++ b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/index.js @@ -0,0 +1,37 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +__exportStar(require("./__generated__/envelope"), exports); +__exportStar(require("./__generated__/sigstore_bundle"), exports); +__exportStar(require("./__generated__/sigstore_common"), exports); +__exportStar(require("./__generated__/sigstore_rekor"), exports); +__exportStar(require("./__generated__/sigstore_trustroot"), exports); +__exportStar(require("./__generated__/sigstore_verification"), exports); diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/package.json b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/package.json new file mode 100644 index 0000000000000..450abb157f31a --- /dev/null +++ b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/package.json @@ -0,0 +1,31 @@ +{ + "name": "@sigstore/protobuf-specs", + "version": "0.2.1", + "description": "code-signing for npm packages", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "scripts": { + "build": "tsc" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/sigstore/protobuf-specs.git" + }, + "files": [ + "dist" + ], + "author": "bdehamer@github.com", + "license": "Apache-2.0", + "bugs": { + "url": "https://github.com/sigstore/protobuf-specs/issues" + }, + "homepage": "https://github.com/sigstore/protobuf-specs#readme", + "devDependencies": { + "@tsconfig/node14": "^1.0.3", + "@types/node": "^18.14.0", + "typescript": "^4.9.5" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } +} diff --git a/node_modules/@sigstore/bundle/package.json b/node_modules/@sigstore/bundle/package.json new file mode 100644 index 0000000000000..2b15d08060753 --- /dev/null +++ b/node_modules/@sigstore/bundle/package.json @@ -0,0 +1,35 @@ +{ + "name": "@sigstore/bundle", + "version": "2.0.0", + "description": "Sigstore bundle type", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "scripts": { + "clean": "shx rm -rf dist *.tsbuildinfo", + "build": "tsc --build", + "test": "jest" + }, + "files": [ + "dist", + "store" + ], + "author": "bdehamer@github.com", + "license": "Apache-2.0", + "repository": { + "type": "git", + "url": "git+https://github.com/sigstore/sigstore-js.git" + }, + "bugs": { + "url": "https://github.com/sigstore/sigstore-js/issues" + }, + "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/bundle#readme", + "publishConfig": { + "provenance": true + }, + "dependencies": { + "@sigstore/protobuf-specs": "^0.2.1" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } +} diff --git a/node_modules/@sigstore/sign/LICENSE b/node_modules/@sigstore/sign/LICENSE new file mode 100644 index 0000000000000..e9e7c1679a09d --- /dev/null +++ b/node_modules/@sigstore/sign/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2023 The Sigstore Authors + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
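All of the generated codecs vendored above follow the same ts-proto pattern: fromJSON normalizes plain JSON into $case-tagged oneof fields and Buffers, while toJSON reverses the mapping and re-encodes bytes as base64. A minimal round-trip sketch, illustrative only and not part of the patch, assuming the vendored @sigstore/protobuf-specs module resolves on the require path (the mediaType string is just an example value):

const { Bundle, HashAlgorithm, hashAlgorithmToJSON } = require('@sigstore/protobuf-specs');

// Plain JSON in: oneof members appear as sibling keys, byte fields as base64.
const bundle = Bundle.fromJSON({
  mediaType: 'application/vnd.dev.sigstore.bundle+json;version=0.2',
  verificationMaterial: { publicKey: { hint: 'my-key' }, tlogEntries: [] },
  messageSignature: {
    messageDigest: { algorithm: 'SHA2_256', digest: Buffer.alloc(32).toString('base64') },
    signature: Buffer.from('not-a-real-signature').toString('base64'),
  },
});

// fromJSON picked the oneof branch and tagged it with $case; byte fields
// came back as Buffers.
console.log(bundle.content.$case); // 'messageSignature'
console.log(Buffer.isBuffer(bundle.content.messageSignature.signature)); // true

// The enum helpers convert in both directions; the UNSPECIFIED member exists
// only to defeat proto3 JSON's "omit the zero value" rule, per the comment
// in sigstore_common.js above.
console.log(hashAlgorithmToJSON(HashAlgorithm.SHA2_256)); // 'SHA2_256'

// toJSON flattens $case back to a plain key and re-encodes bytes as base64.
console.log(typeof Bundle.toJSON(bundle).messageSignature.signature); // 'string'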
diff --git a/node_modules/@sigstore/sign/dist/bundler/base.js b/node_modules/@sigstore/sign/dist/bundler/base.js new file mode 100644 index 0000000000000..61d5eba4568a3 --- /dev/null +++ b/node_modules/@sigstore/sign/dist/bundler/base.js @@ -0,0 +1,50 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.BaseBundleBuilder = void 0; +// BaseBundleBuilder is a base class for BundleBuilder implementations. It +// provides the basic workflow for signing and witnessing an artifact. +// Subclasses must implement the `package` method to assemble a valid bundle +// with the generated signature and verification material. +class BaseBundleBuilder { + constructor(options) { + this.signer = options.signer; + this.witnesses = options.witnesses; + } + // Executes the signing/witnessing process for the given artifact. + async create(artifact) { + const signature = await this.prepare(artifact).then((blob) => this.signer.sign(blob)); + const bundle = await this.package(artifact, signature); + // Invoke all of the witnesses in parallel + const verificationMaterials = await Promise.all(this.witnesses.map((witness) => witness.testify(bundle.content, publicKey(signature.key)))); + // Collect the verification material from all of the witnesses + const tlogEntryList = []; + const timestampList = []; + verificationMaterials.forEach(({ tlogEntries, rfc3161Timestamps }) => { + tlogEntryList.push(...(tlogEntries ?? [])); + timestampList.push(...(rfc3161Timestamps ?? [])); + }); + // Merge the collected verification material into the bundle + bundle.verificationMaterial.tlogEntries = tlogEntryList; + bundle.verificationMaterial.timestampVerificationData = { + rfc3161Timestamps: timestampList, + }; + return bundle; + } + // Override this function to apply any pre-signing transformations to the + // artifact. The returned buffer will be signed by the signer. The default + // implementation simply returns the artifact data. + async prepare(artifact) { + return artifact.data; + } +} +exports.BaseBundleBuilder = BaseBundleBuilder; +// Extracts the public key from a KeyMaterial. Returns either the public key +// or the certificate, depending on the type of key material. +function publicKey(key) { + switch (key.$case) { + case 'publicKey': + return key.publicKey; + case 'x509Certificate': + return key.certificate; + } +} diff --git a/node_modules/@sigstore/sign/dist/bundler/bundle.js b/node_modules/@sigstore/sign/dist/bundler/bundle.js new file mode 100644 index 0000000000000..f01aac252b304 --- /dev/null +++ b/node_modules/@sigstore/sign/dist/bundler/bundle.js @@ -0,0 +1,70 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.toDSSEBundle = exports.toMessageSignatureBundle = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const sigstore = __importStar(require("@sigstore/bundle")); +const util_1 = require("../util"); +// Helper functions for assembling the parts of a Sigstore bundle +// Message signature bundle - $case: 'messageSignature' +function toMessageSignatureBundle(artifact, signature) { + const digest = util_1.crypto.hash(artifact.data); + return sigstore.toMessageSignatureBundle({ + digest, + signature: signature.signature, + certificate: signature.key.$case === 'x509Certificate' + ? util_1.pem.toDER(signature.key.certificate) + : undefined, + keyHint: signature.key.$case === 'publicKey' ? signature.key.hint : undefined, + }); +} +exports.toMessageSignatureBundle = toMessageSignatureBundle; +// DSSE envelope bundle - $case: 'dsseEnvelope' +function toDSSEBundle(artifact, signature) { + return sigstore.toDSSEBundle({ + artifact: artifact.data, + artifactType: artifact.type, + signature: signature.signature, + certificate: signature.key.$case === 'x509Certificate' + ? util_1.pem.toDER(signature.key.certificate) + : undefined, + keyHint: signature.key.$case === 'publicKey' ? signature.key.hint : undefined, + }); +} +exports.toDSSEBundle = toDSSEBundle; diff --git a/node_modules/@sigstore/sign/dist/bundler/dsse.js b/node_modules/@sigstore/sign/dist/bundler/dsse.js new file mode 100644 index 0000000000000..486d289aea38c --- /dev/null +++ b/node_modules/@sigstore/sign/dist/bundler/dsse.js @@ -0,0 +1,45 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.DSSEBundleBuilder = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +const util_1 = require("../util"); +const base_1 = require("./base"); +const bundle_1 = require("./bundle"); +// BundleBuilder implementation for DSSE wrapped attestations +class DSSEBundleBuilder extends base_1.BaseBundleBuilder { + constructor(options) { + super(options); + } + // DSSE requires the artifact to be pre-encoded with the payload type + // before the signature is generated. + async prepare(artifact) { + const a = artifactDefaults(artifact); + return util_1.dsse.preAuthEncoding(a.type, a.data); + } + // Packages the artifact and signature into a DSSE bundle + async package(artifact, signature) { + return (0, bundle_1.toDSSEBundle)(artifactDefaults(artifact), signature); + } +} +exports.DSSEBundleBuilder = DSSEBundleBuilder; +// Defaults the artifact type to an empty string if not provided +function artifactDefaults(artifact) { + return { + ...artifact, + type: artifact.type ?? '', + }; +} diff --git a/node_modules/@sigstore/sign/dist/bundler/index.js b/node_modules/@sigstore/sign/dist/bundler/index.js new file mode 100644 index 0000000000000..d67c8c324a4f0 --- /dev/null +++ b/node_modules/@sigstore/sign/dist/bundler/index.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.MessageSignatureBundleBuilder = exports.DSSEBundleBuilder = void 0; +var dsse_1 = require("./dsse"); +Object.defineProperty(exports, "DSSEBundleBuilder", { enumerable: true, get: function () { return dsse_1.DSSEBundleBuilder; } }); +var message_1 = require("./message"); +Object.defineProperty(exports, "MessageSignatureBundleBuilder", { enumerable: true, get: function () { return message_1.MessageSignatureBundleBuilder; } }); diff --git a/node_modules/@sigstore/sign/dist/bundler/message.js b/node_modules/@sigstore/sign/dist/bundler/message.js new file mode 100644 index 0000000000000..e3991f42bab93 --- /dev/null +++ b/node_modules/@sigstore/sign/dist/bundler/message.js @@ -0,0 +1,30 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.MessageSignatureBundleBuilder = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +const base_1 = require("./base"); +const bundle_1 = require("./bundle"); +// BundleBuilder implementation for raw message signatures +class MessageSignatureBundleBuilder extends base_1.BaseBundleBuilder { + constructor(options) { + super(options); + } + async package(artifact, signature) { + return (0, bundle_1.toMessageSignatureBundle)(artifact, signature); + } +} +exports.MessageSignatureBundleBuilder = MessageSignatureBundleBuilder; diff --git a/node_modules/@sigstore/sign/dist/error.js b/node_modules/@sigstore/sign/dist/error.js new file mode 100644 index 0000000000000..b52ea7eef5d9b --- /dev/null +++ b/node_modules/@sigstore/sign/dist/error.js @@ -0,0 +1,12 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.InternalError = void 0; +class InternalError extends Error { + constructor({ code, message, cause, }) { + super(message); + this.name = this.constructor.name; + this.cause = cause; + this.code = code; + } +} +exports.InternalError = InternalError; diff --git a/node_modules/@sigstore/sign/dist/external/error.js b/node_modules/@sigstore/sign/dist/external/error.js new file mode 100644 index 0000000000000..d1e1c3df8a878 --- /dev/null +++ b/node_modules/@sigstore/sign/dist/external/error.js @@ -0,0 +1,21 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.checkStatus = exports.HTTPError = void 0; +class HTTPError extends Error { + constructor(response) { + super(`HTTP Error: ${response.status} ${response.statusText}`); + this.response = response; + this.statusCode = response.status; + this.location = response.headers?.get('Location') || undefined; + } +} +exports.HTTPError = HTTPError; +const checkStatus = (response) => { + if (response.ok) { + return response; + } + else { + throw new HTTPError(response); + } +}; +exports.checkStatus = checkStatus; diff --git a/node_modules/@sigstore/sign/dist/external/fulcio.js b/node_modules/@sigstore/sign/dist/external/fulcio.js new file mode 100644 index 0000000000000..b27637c2dc570 --- /dev/null +++ b/node_modules/@sigstore/sign/dist/external/fulcio.js @@ -0,0 +1,51 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Fulcio = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const make_fetch_happen_1 = __importDefault(require("make-fetch-happen")); +const util_1 = require("../util"); +const error_1 = require("./error"); +/** + * Fulcio API client. 
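+ *
+ * A hypothetical usage sketch (the baseURL shown is Sigstore's public Fulcio
+ * instance; `certificateRequest` is an illustrative variable holding a Fulcio
+ * CreateSigningCertificateRequest payload, and retry/timeout options are
+ * passed through to make-fetch-happen):
+ *
+ *   const fulcio = new Fulcio({ baseURL: 'https://fulcio.sigstore.dev' });
+ *   const resp = await fulcio.createSigningCertificate(certificateRequest);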
+ */
+class Fulcio {
+    constructor(options) {
+        this.fetch = make_fetch_happen_1.default.defaults({
+            retry: options.retry,
+            timeout: options.timeout,
+            headers: {
+                'Content-Type': 'application/json',
+                'User-Agent': util_1.ua.getUserAgent(),
+            },
+        });
+        this.baseUrl = options.baseURL;
+    }
+    async createSigningCertificate(request) {
+        const url = `${this.baseUrl}/api/v2/signingCert`;
+        const response = await this.fetch(url, {
+            method: 'POST',
+            body: JSON.stringify(request),
+        });
+        (0, error_1.checkStatus)(response);
+        const data = await response.json();
+        return data;
+    }
+}
+exports.Fulcio = Fulcio;
diff --git a/node_modules/@sigstore/sign/dist/external/rekor.js b/node_modules/@sigstore/sign/dist/external/rekor.js
new file mode 100644
index 0000000000000..9b4e66b656251
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/external/rekor.js
@@ -0,0 +1,115 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Rekor = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const make_fetch_happen_1 = __importDefault(require("make-fetch-happen"));
+const util_1 = require("../util");
+const error_1 = require("./error");
+/**
+ * Rekor API client.
+ */
+class Rekor {
+    constructor(options) {
+        this.fetch = make_fetch_happen_1.default.defaults({
+            retry: options.retry,
+            timeout: options.timeout,
+            headers: {
+                Accept: 'application/json',
+                'User-Agent': util_1.ua.getUserAgent(),
+            },
+        });
+        this.baseUrl = options.baseURL;
+    }
+    /**
+     * Create a new entry in the Rekor log.
+     * @param proposedEntry {ProposedEntry} Data to create a new entry
+     * @returns {Promise<Entry>} The created entry
+     */
+    async createEntry(proposedEntry) {
+        const url = `${this.baseUrl}/api/v1/log/entries`;
+        const response = await this.fetch(url, {
+            method: 'POST',
+            headers: { 'Content-Type': 'application/json' },
+            body: JSON.stringify(proposedEntry),
+        });
+        (0, error_1.checkStatus)(response);
+        const data = await response.json();
+        return entryFromResponse(data);
+    }
+    /**
+     * Get an entry from the Rekor log.
+     * @param uuid {string} The UUID of the entry to retrieve
+     * @returns {Promise<Entry>} The retrieved entry
+     */
+    async getEntry(uuid) {
+        const url = `${this.baseUrl}/api/v1/log/entries/${uuid}`;
+        const response = await this.fetch(url);
+        (0, error_1.checkStatus)(response);
+        const data = await response.json();
+        return entryFromResponse(data);
+    }
+    /**
+     * Search the Rekor log index for entries matching the given query.
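+     *
+     * A hypothetical query, e.g. looking up entries by artifact digest
+     * (the SearchIndex shape is defined by the Rekor API, not this client):
+     *
+     *   rekor.searchIndex({ hash: 'sha256:...' })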
+     * @param opts {SearchIndex} Options to search the Rekor log
+     * @returns {Promise<string[]>} UUIDs of matching entries
+     */
+    async searchIndex(opts) {
+        const url = `${this.baseUrl}/api/v1/index/retrieve`;
+        const response = await this.fetch(url, {
+            method: 'POST',
+            body: JSON.stringify(opts),
+            headers: { 'Content-Type': 'application/json' },
+        });
+        (0, error_1.checkStatus)(response);
+        const data = await response.json();
+        return data;
+    }
+    /**
+     * Search the Rekor log for entries matching the given query.
+     * @param opts {SearchLogQuery} Query to search the Rekor log
+     * @returns {Promise<Entry[]>} List of matching entries
+     */
+    async searchLog(opts) {
+        const url = `${this.baseUrl}/api/v1/log/entries/retrieve`;
+        const response = await this.fetch(url, {
+            method: 'POST',
+            body: JSON.stringify(opts),
+            headers: { 'Content-Type': 'application/json' },
+        });
+        (0, error_1.checkStatus)(response);
+        const rawData = await response.json();
+        const data = rawData.map((d) => entryFromResponse(d));
+        return data;
+    }
+}
+exports.Rekor = Rekor;
+// Unpack the response from the Rekor API into a more convenient format.
+function entryFromResponse(data) {
+    const entries = Object.entries(data);
+    if (entries.length != 1) {
+        throw new Error('Received multiple entries in Rekor response');
+    }
+    // Grab UUID and entry data from the response
+    const [uuid, entry] = entries[0];
+    return {
+        ...entry,
+        uuid,
+    };
+}
diff --git a/node_modules/@sigstore/sign/dist/external/tsa.js b/node_modules/@sigstore/sign/dist/external/tsa.js
new file mode 100644
index 0000000000000..5277d7d3f9707
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/external/tsa.js
@@ -0,0 +1,47 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TimestampAuthority = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/ +const make_fetch_happen_1 = __importDefault(require("make-fetch-happen")); +const util_1 = require("../util"); +const error_1 = require("./error"); +class TimestampAuthority { + constructor(options) { + this.fetch = make_fetch_happen_1.default.defaults({ + retry: options.retry, + timeout: options.timeout, + headers: { + 'Content-Type': 'application/json', + 'User-Agent': util_1.ua.getUserAgent(), + }, + }); + this.baseUrl = options.baseURL; + } + async createTimestamp(request) { + const url = `${this.baseUrl}/api/v1/timestamp`; + const response = await this.fetch(url, { + method: 'POST', + body: JSON.stringify(request), + }); + (0, error_1.checkStatus)(response); + return response.buffer(); + } +} +exports.TimestampAuthority = TimestampAuthority; diff --git a/node_modules/@sigstore/sign/dist/identity/ci.js b/node_modules/@sigstore/sign/dist/identity/ci.js new file mode 100644 index 0000000000000..d79133952b605 --- /dev/null +++ b/node_modules/@sigstore/sign/dist/identity/ci.js @@ -0,0 +1,73 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.CIContextProvider = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const make_fetch_happen_1 = __importDefault(require("make-fetch-happen")); +// Collection of all the CI-specific providers we have implemented +const providers = [getGHAToken, getEnv]; +/** + * CIContextProvider is a composite identity provider which will iterate + * over all of the CI-specific providers and return the token from the first + * one that resolves. + */ +class CIContextProvider { + /* istanbul ignore next */ + constructor(audience = 'sigstore') { + this.audience = audience; + } + // Invoke all registered ProviderFuncs and return the value of whichever one + // resolves first. + async getToken() { + return Promise.any(providers.map((getToken) => getToken(this.audience))).catch(() => Promise.reject('CI: no tokens available')); + } +} +exports.CIContextProvider = CIContextProvider; +/** + * getGHAToken can retrieve an OIDC token when running in a GitHub Actions + * workflow + */ +async function getGHAToken(audience) { + // Check to see if we're running in GitHub Actions + if (!process.env.ACTIONS_ID_TOKEN_REQUEST_URL || + !process.env.ACTIONS_ID_TOKEN_REQUEST_TOKEN) { + return Promise.reject('no token available'); + } + // Construct URL to request token w/ appropriate audience + const url = new URL(process.env.ACTIONS_ID_TOKEN_REQUEST_URL); + url.searchParams.append('audience', audience); + const response = await (0, make_fetch_happen_1.default)(url.href, { + retry: 2, + headers: { + Accept: 'application/json', + Authorization: `Bearer ${process.env.ACTIONS_ID_TOKEN_REQUEST_TOKEN}`, + }, + }); + return response.json().then((data) => data.value); +} +/** + * getEnv can retrieve an OIDC token from an environment variable. 
+ * This matches the behavior of https://github.com/sigstore/cosign/tree/main/pkg/providers/envvar + */ +async function getEnv() { + if (!process.env.SIGSTORE_ID_TOKEN) { + return Promise.reject('no token available'); + } + return process.env.SIGSTORE_ID_TOKEN; +} diff --git a/node_modules/@sigstore/sign/dist/identity/index.js b/node_modules/@sigstore/sign/dist/identity/index.js new file mode 100644 index 0000000000000..1c1223b443fab --- /dev/null +++ b/node_modules/@sigstore/sign/dist/identity/index.js @@ -0,0 +1,20 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.CIContextProvider = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +var ci_1 = require("./ci"); +Object.defineProperty(exports, "CIContextProvider", { enumerable: true, get: function () { return ci_1.CIContextProvider; } }); diff --git a/node_modules/@sigstore/sign/dist/identity/provider.js b/node_modules/@sigstore/sign/dist/identity/provider.js new file mode 100644 index 0000000000000..c8ad2e549bdc6 --- /dev/null +++ b/node_modules/@sigstore/sign/dist/identity/provider.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/@sigstore/sign/dist/index.js b/node_modules/@sigstore/sign/dist/index.js new file mode 100644 index 0000000000000..f6d97c673ec62 --- /dev/null +++ b/node_modules/@sigstore/sign/dist/index.js @@ -0,0 +1,15 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TSAWitness = exports.RekorWitness = exports.FulcioSigner = exports.CIContextProvider = exports.InternalError = exports.MessageSignatureBundleBuilder = exports.DSSEBundleBuilder = void 0; +var bundler_1 = require("./bundler"); +Object.defineProperty(exports, "DSSEBundleBuilder", { enumerable: true, get: function () { return bundler_1.DSSEBundleBuilder; } }); +Object.defineProperty(exports, "MessageSignatureBundleBuilder", { enumerable: true, get: function () { return bundler_1.MessageSignatureBundleBuilder; } }); +var error_1 = require("./error"); +Object.defineProperty(exports, "InternalError", { enumerable: true, get: function () { return error_1.InternalError; } }); +var identity_1 = require("./identity"); +Object.defineProperty(exports, "CIContextProvider", { enumerable: true, get: function () { return identity_1.CIContextProvider; } }); +var signer_1 = require("./signer"); +Object.defineProperty(exports, "FulcioSigner", { enumerable: true, get: function () { return signer_1.FulcioSigner; } }); +var witness_1 = require("./witness"); +Object.defineProperty(exports, "RekorWitness", { enumerable: true, get: function () { return witness_1.RekorWitness; } }); +Object.defineProperty(exports, "TSAWitness", { enumerable: true, get: function () { return witness_1.TSAWitness; } }); diff --git a/node_modules/@sigstore/sign/dist/signer/fulcio/ca.js b/node_modules/@sigstore/sign/dist/signer/fulcio/ca.js new file mode 100644 index 0000000000000..9c0af0e914493 --- /dev/null +++ 
b/node_modules/@sigstore/sign/dist/signer/fulcio/ca.js @@ -0,0 +1,64 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.CAClient = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const error_1 = require("../../error"); +const fulcio_1 = require("../../external/fulcio"); +class CAClient { + constructor(options) { + this.fulcio = new fulcio_1.Fulcio({ + baseURL: options.fulcioBaseURL, + retry: options.retry, + timeout: options.timeout, + }); + } + async createSigningCertificate(identityToken, publicKey, challenge) { + const request = toCertificateRequest(identityToken, publicKey, challenge); + try { + const resp = await this.fulcio.createSigningCertificate(request); + // Account for the fact that the response may contain either a + // signedCertificateEmbeddedSct or a signedCertificateDetachedSct. + const cert = resp.signedCertificateEmbeddedSct + ? resp.signedCertificateEmbeddedSct + : resp.signedCertificateDetachedSct; + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + return cert.chain.certificates; + } + catch (err) { + throw new error_1.InternalError({ + code: 'CA_CREATE_SIGNING_CERTIFICATE_ERROR', + message: 'error creating signing certificate', + cause: err, + }); + } + } +} +exports.CAClient = CAClient; +function toCertificateRequest(identityToken, publicKey, challenge) { + return { + credentials: { + oidcIdentityToken: identityToken, + }, + publicKeyRequest: { + publicKey: { + algorithm: 'ECDSA', + content: publicKey, + }, + proofOfPossession: challenge.toString('base64'), + }, + }; +} diff --git a/node_modules/@sigstore/sign/dist/signer/fulcio/ephemeral.js b/node_modules/@sigstore/sign/dist/signer/fulcio/ephemeral.js new file mode 100644 index 0000000000000..481aa5c3579a2 --- /dev/null +++ b/node_modules/@sigstore/sign/dist/signer/fulcio/ephemeral.js @@ -0,0 +1,45 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.EphemeralSigner = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const crypto_1 = __importDefault(require("crypto")); +const EC_KEYPAIR_TYPE = 'ec'; +const P256_CURVE = 'P-256'; +// Signer implementation which uses an ephemeral keypair to sign artifacts. +// The private key lives only in memory and is tied to the lifetime of the +// EphemeralSigner instance. 
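+//
+// A minimal usage sketch (hypothetical; any Buffer-valued artifact works):
+//
+//   const signer = new EphemeralSigner();
+//   const { signature, key } = await signer.sign(Buffer.from('artifact'));
+//   // key.$case === 'publicKey'; key.publicKey holds the PEM-encoded SPKI key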
+class EphemeralSigner { + constructor() { + this.keypair = crypto_1.default.generateKeyPairSync(EC_KEYPAIR_TYPE, { + namedCurve: P256_CURVE, + }); + } + async sign(data) { + const signature = crypto_1.default.sign(null, data, this.keypair.privateKey); + const publicKey = this.keypair.publicKey + .export({ format: 'pem', type: 'spki' }) + .toString('ascii'); + return { + signature: signature, + key: { $case: 'publicKey', publicKey }, + }; + } +} +exports.EphemeralSigner = EphemeralSigner; diff --git a/node_modules/@sigstore/sign/dist/signer/fulcio/index.js b/node_modules/@sigstore/sign/dist/signer/fulcio/index.js new file mode 100644 index 0000000000000..b2eff7e1b981f --- /dev/null +++ b/node_modules/@sigstore/sign/dist/signer/fulcio/index.js @@ -0,0 +1,73 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.FulcioSigner = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const error_1 = require("../../error"); +const util_1 = require("../../util"); +const ca_1 = require("./ca"); +const ephemeral_1 = require("./ephemeral"); +// Signer implementation which can be used to decorate another signer +// with a Fulcio-issued signing certificate for the signer's public key. +// Must be instantiated with an identity provider which can provide a JWT +// which represents the identity to be bound to the signing certificate. +class FulcioSigner { + constructor(options) { + this.ca = new ca_1.CAClient(options); + this.identityProvider = options.identityProvider; + this.keyHolder = options.keyHolder || new ephemeral_1.EphemeralSigner(); + } + async sign(data) { + // Retrieve identity token from the supplied identity provider + const identityToken = await this.getIdentityToken(); + // Extract challenge claim from OIDC token + const subject = util_1.oidc.extractJWTSubject(identityToken); + // Construct challenge value by signing the subject claim + const challenge = await this.keyHolder.sign(Buffer.from(subject)); + if (challenge.key.$case !== 'publicKey') { + throw new error_1.InternalError({ + code: 'CA_CREATE_SIGNING_CERTIFICATE_ERROR', + message: 'unexpected format for signing key', + }); + } + // Create signing certificate + const certificates = await this.ca.createSigningCertificate(identityToken, challenge.key.publicKey, challenge.signature); + // Generate artifact signature + const signature = await this.keyHolder.sign(data); + // Specifically returning only the first certificate in the chain + // as the key. 
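+        // The returned value pairs the artifact signature with the leaf
+        // certificate; a shape sketch (not literal values):
+        //   { signature: <Buffer>, key: { $case: 'x509Certificate', certificate: '-----BEGIN CERTIFICATE-----...' } }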
+ return { + signature: signature.signature, + key: { + $case: 'x509Certificate', + certificate: certificates[0], + }, + }; + } + async getIdentityToken() { + try { + return await this.identityProvider.getToken(); + } + catch (err) { + throw new error_1.InternalError({ + code: 'IDENTITY_TOKEN_READ_ERROR', + message: 'error retrieving identity token', + cause: err, + }); + } + } +} +exports.FulcioSigner = FulcioSigner; diff --git a/node_modules/@sigstore/sign/dist/signer/index.js b/node_modules/@sigstore/sign/dist/signer/index.js new file mode 100644 index 0000000000000..4f64adf41ed8d --- /dev/null +++ b/node_modules/@sigstore/sign/dist/signer/index.js @@ -0,0 +1,20 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.FulcioSigner = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +var fulcio_1 = require("./fulcio"); +Object.defineProperty(exports, "FulcioSigner", { enumerable: true, get: function () { return fulcio_1.FulcioSigner; } }); diff --git a/node_modules/@sigstore/sign/dist/signer/signer.js b/node_modules/@sigstore/sign/dist/signer/signer.js new file mode 100644 index 0000000000000..b92c54183375d --- /dev/null +++ b/node_modules/@sigstore/sign/dist/signer/signer.js @@ -0,0 +1,17 @@ +"use strict"; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/@sigstore/sign/dist/types/fetch.js b/node_modules/@sigstore/sign/dist/types/fetch.js new file mode 100644 index 0000000000000..c8ad2e549bdc6 --- /dev/null +++ b/node_modules/@sigstore/sign/dist/types/fetch.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/@sigstore/sign/dist/util/crypto.js b/node_modules/@sigstore/sign/dist/util/crypto.js new file mode 100644 index 0000000000000..11aad2fb6ff8b --- /dev/null +++ b/node_modules/@sigstore/sign/dist/util/crypto.js @@ -0,0 +1,27 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.hash = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const crypto_1 = __importDefault(require("crypto")); +const SHA256_ALGORITHM = 'sha256'; +function hash(data, algorithm = SHA256_ALGORITHM) { + return crypto_1.default.createHash(algorithm).update(data).digest(); +} +exports.hash = hash; diff --git a/node_modules/@sigstore/sign/dist/util/dsse.js b/node_modules/@sigstore/sign/dist/util/dsse.js new file mode 100644 index 0000000000000..befcdbdc14ec8 --- /dev/null +++ b/node_modules/@sigstore/sign/dist/util/dsse.js @@ -0,0 +1,25 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.preAuthEncoding = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const PAE_PREFIX = 'DSSEv1'; +// DSSE Pre-Authentication Encoding +function preAuthEncoding(payloadType, payload) { + const prefix = Buffer.from(`${PAE_PREFIX} ${payloadType.length} ${payloadType} ${payload.length} `, 'ascii'); + return Buffer.concat([prefix, payload]); +} +exports.preAuthEncoding = preAuthEncoding; diff --git a/node_modules/@sigstore/sign/dist/util/encoding.js b/node_modules/@sigstore/sign/dist/util/encoding.js new file mode 100644 index 0000000000000..b020ac4d6ecd4 --- /dev/null +++ b/node_modules/@sigstore/sign/dist/util/encoding.js @@ -0,0 +1,28 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.base64Decode = exports.base64Encode = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +const BASE64_ENCODING = 'base64'; +const UTF8_ENCODING = 'utf-8'; +function base64Encode(str) { + return Buffer.from(str, UTF8_ENCODING).toString(BASE64_ENCODING); +} +exports.base64Encode = base64Encode; +function base64Decode(str) { + return Buffer.from(str, BASE64_ENCODING).toString(UTF8_ENCODING); +} +exports.base64Decode = base64Decode; diff --git a/node_modules/@sigstore/sign/dist/util/index.js b/node_modules/@sigstore/sign/dist/util/index.js new file mode 100644 index 0000000000000..567e5dbf6e04c --- /dev/null +++ b/node_modules/@sigstore/sign/dist/util/index.js @@ -0,0 +1,48 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ua = exports.pem = exports.oidc = exports.json = exports.encoding = exports.dsse = exports.crypto = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +exports.crypto = __importStar(require("./crypto")); +exports.dsse = __importStar(require("./dsse")); +exports.encoding = __importStar(require("./encoding")); +exports.json = __importStar(require("./json")); +exports.oidc = __importStar(require("./oidc")); +exports.pem = __importStar(require("./pem")); +exports.ua = __importStar(require("./ua")); diff --git a/node_modules/@sigstore/sign/dist/util/json.js b/node_modules/@sigstore/sign/dist/util/json.js new file mode 100644 index 0000000000000..69176ad731eb7 --- /dev/null +++ b/node_modules/@sigstore/sign/dist/util/json.js @@ -0,0 +1,61 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.canonicalize = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +// JSON canonicalization per https://github.com/cyberphone/json-canonicalization +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function canonicalize(object) { + let buffer = ''; + if (object === null || typeof object !== 'object' || object.toJSON != null) { + // Primitives or toJSONable objects + buffer += JSON.stringify(object); + } + else if (Array.isArray(object)) { + // Array - maintain element order + buffer += '['; + let first = true; + object.forEach((element) => { + if (!first) { + buffer += ','; + } + first = false; + // recursive call + buffer += canonicalize(element); + }); + buffer += ']'; + } + else { + // Object - Sort properties before serializing + buffer += '{'; + let first = true; + Object.keys(object) + .sort() + .forEach((property) => { + if (!first) { + buffer += ','; + } + first = false; + buffer += JSON.stringify(property); + buffer += ':'; + // recursive call + buffer += canonicalize(object[property]); + }); + buffer += '}'; + } + return buffer; +} +exports.canonicalize = canonicalize; diff --git a/node_modules/@sigstore/sign/dist/util/oidc.js b/node_modules/@sigstore/sign/dist/util/oidc.js new file mode 100644 index 0000000000000..8b49f3bbe8440 --- /dev/null +++ b/node_modules/@sigstore/sign/dist/util/oidc.js @@ -0,0 +1,54 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.extractJWTSubject = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +const enc = __importStar(require("./encoding")); +function extractJWTSubject(jwt) { + const parts = jwt.split('.', 3); + const payload = JSON.parse(enc.base64Decode(parts[1])); + switch (payload.iss) { + case 'https://accounts.google.com': + case 'https://oauth2.sigstore.dev/auth': + return payload.email; + default: + return payload.sub; + } +} +exports.extractJWTSubject = extractJWTSubject; diff --git a/node_modules/@sigstore/sign/dist/util/pem.js b/node_modules/@sigstore/sign/dist/util/pem.js new file mode 100644 index 0000000000000..36eeebd2052f5 --- /dev/null +++ b/node_modules/@sigstore/sign/dist/util/pem.js @@ -0,0 +1,27 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.toDER = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const PEM_HEADER = /-----BEGIN (.*)-----/; +const PEM_FOOTER = /-----END (.*)-----/; +function toDER(certificate) { + const lines = certificate + .split('\n') + .map((line) => line.match(PEM_HEADER) || line.match(PEM_FOOTER) ? '' : line); + return Buffer.from(lines.join(''), 'base64'); +} +exports.toDER = toDER; diff --git a/node_modules/@sigstore/sign/dist/util/ua.js b/node_modules/@sigstore/sign/dist/util/ua.js new file mode 100644 index 0000000000000..c142330eb8338 --- /dev/null +++ b/node_modules/@sigstore/sign/dist/util/ua.js @@ -0,0 +1,33 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getUserAgent = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/
+const os_1 = __importDefault(require("os"));
+// Format User-Agent: <product> / <product-version> (<system-information>)
+// source: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/User-Agent
+const getUserAgent = () => {
+    // eslint-disable-next-line @typescript-eslint/no-var-requires
+    const packageVersion = require('../../package.json').version;
+    const nodeVersion = process.version;
+    const platformName = os_1.default.platform();
+    const archName = os_1.default.arch();
+    return `sigstore-js/${packageVersion} (Node ${nodeVersion}) (${platformName}/${archName})`;
+};
+exports.getUserAgent = getUserAgent;
diff --git a/node_modules/@sigstore/sign/dist/witness/index.js b/node_modules/@sigstore/sign/dist/witness/index.js
new file mode 100644
index 0000000000000..7218ea41bce6d
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/witness/index.js
@@ -0,0 +1,22 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TSAWitness = exports.RekorWitness = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+var tlog_1 = require("./tlog");
+Object.defineProperty(exports, "RekorWitness", { enumerable: true, get: function () { return tlog_1.RekorWitness; } });
+var tsa_1 = require("./tsa");
+Object.defineProperty(exports, "TSAWitness", { enumerable: true, get: function () { return tsa_1.TSAWitness; } });
diff --git a/node_modules/@sigstore/sign/dist/witness/tlog/client.js b/node_modules/@sigstore/sign/dist/witness/tlog/client.js
new file mode 100644
index 0000000000000..3c1b5212e4265
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/witness/tlog/client.js
@@ -0,0 +1,69 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TLogClient = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const error_1 = require("../../error");
+const error_2 = require("../../external/error");
+const rekor_1 = require("../../external/rekor");
+class TLogClient {
+    constructor(options) {
+        this.fetchOnConflict = options.fetchOnConflict ??
false; + this.rekor = new rekor_1.Rekor({ + baseURL: options.rekorBaseURL, + retry: options.retry, + timeout: options.timeout, + }); + } + async createEntry(proposedEntry) { + let entry; + try { + entry = await this.rekor.createEntry(proposedEntry); + } + catch (err) { + // If the entry already exists, fetch it (if enabled) + if (entryExistsError(err) && this.fetchOnConflict) { + // Grab the UUID of the existing entry from the location header + /* istanbul ignore next */ + const uuid = err.location.split('/').pop() || ''; + try { + entry = await this.rekor.getEntry(uuid); + } + catch (err) { + throw new error_1.InternalError({ + code: 'TLOG_FETCH_ENTRY_ERROR', + message: 'error fetching tlog entry', + cause: err, + }); + } + } + else { + throw new error_1.InternalError({ + code: 'TLOG_CREATE_ENTRY_ERROR', + message: 'error creating tlog entry', + cause: err, + }); + } + } + return entry; + } +} +exports.TLogClient = TLogClient; +function entryExistsError(value) { + return (value instanceof error_2.HTTPError && + value.statusCode === 409 && + value.location !== undefined); +} diff --git a/node_modules/@sigstore/sign/dist/witness/tlog/entry.js b/node_modules/@sigstore/sign/dist/witness/tlog/entry.js new file mode 100644 index 0000000000000..c237523a2c9b2 --- /dev/null +++ b/node_modules/@sigstore/sign/dist/witness/tlog/entry.js @@ -0,0 +1,136 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.toProposedEntry = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/
+const bundle_1 = require("@sigstore/bundle");
+const util_1 = require("../../util");
+function toProposedEntry(content, publicKey, 
+// TODO: Remove this parameter once we have completely switched to 'dsse' entries
+entryType = 'intoto') {
+    switch (content.$case) {
+        case 'dsseEnvelope':
+            // TODO: Remove this conditional once we have completely switched to 'dsse' entries
+            if (entryType === 'dsse') {
+                return toProposedDSSEEntry(content.dsseEnvelope, publicKey);
+            }
+            return toProposedIntotoEntry(content.dsseEnvelope, publicKey);
+        case 'messageSignature':
+            return toProposedHashedRekordEntry(content.messageSignature, publicKey);
+    }
+}
+exports.toProposedEntry = toProposedEntry;
+// Returns a properly formatted Rekor "hashedrekord" entry for the given digest
+// and signature
+function toProposedHashedRekordEntry(messageSignature, publicKey) {
+    const hexDigest = messageSignature.messageDigest.digest.toString('hex');
+    const b64Signature = messageSignature.signature.toString('base64');
+    const b64Key = util_1.encoding.base64Encode(publicKey);
+    return {
+        apiVersion: '0.0.1',
+        kind: 'hashedrekord',
+        spec: {
+            data: {
+                hash: {
+                    algorithm: 'sha256',
+                    value: hexDigest,
+                },
+            },
+            signature: {
+                content: b64Signature,
+                publicKey: {
+                    content: b64Key,
+                },
+            },
+        },
+    };
+}
+// Returns a properly formatted Rekor "dsse" entry for the given DSSE envelope
+// and signature
+function toProposedDSSEEntry(envelope, publicKey) {
+    const envelopeJSON = JSON.stringify((0, bundle_1.envelopeToJSON)(envelope));
+    const encodedKey = util_1.encoding.base64Encode(publicKey);
+    return {
+        apiVersion: '0.0.1',
+        kind: 'dsse',
+        spec: {
+            proposedContent: {
+                envelope: envelopeJSON,
+                verifiers: [encodedKey],
+            },
+        },
+    };
+}
+// Returns a properly formatted Rekor "intoto" entry for the given DSSE
+// envelope and signature
+function toProposedIntotoEntry(envelope, publicKey) {
+    // Calculate the value for the payloadHash field in the Rekor entry
+    const payloadHash = util_1.crypto.hash(envelope.payload).toString('hex');
+    // Calculate the value for the hash field in the Rekor entry
+    const envelopeHash = calculateDSSEHash(envelope, publicKey);
+    // Collect values for re-creating the DSSE envelope.
+    // Double-encode payload and signature because that's what Rekor expects
+    const payload = util_1.encoding.base64Encode(envelope.payload.toString('base64'));
+    const sig = util_1.encoding.base64Encode(envelope.signatures[0].sig.toString('base64'));
+    const keyid = envelope.signatures[0].keyid;
+    const encodedKey = util_1.encoding.base64Encode(publicKey);
+    // Create the envelope portion of the entry. Note the inclusion of the
+    // publicKey in the signature struct is not a standard part of a DSSE
+    // envelope, but is required by Rekor.
+    const dsse = {
+        payloadType: envelope.payloadType,
+        payload: payload,
+        signatures: [{ sig, publicKey: encodedKey }],
+    };
+    // If the keyid is an empty string, Rekor seems to remove it altogether. We
+    // need to do the same here so that we can properly recreate the entry for
+    // verification.
+    if (keyid.length > 0) {
+        dsse.signatures[0].keyid = keyid;
+    }
+    return {
+        apiVersion: '0.0.2',
+        kind: 'intoto',
+        spec: {
+            content: {
+                envelope: dsse,
+                hash: { algorithm: 'sha256', value: envelopeHash },
+                payloadHash: { algorithm: 'sha256', value: payloadHash },
+            },
+        },
+    };
+}
+// Calculates the hash of a DSSE envelope for inclusion in a Rekor entry.
+// There is no standard way to do this, so the scheme we're using is as
+// follows:
+//  * payload is base64 encoded
+//  * signature is base64 encoded (only the first signature is used)
+//  * keyid is included ONLY if it is NOT an empty string
+//  * The resulting JSON is canonicalized and hashed to a hex string
+function calculateDSSEHash(envelope, publicKey) {
+    const dsse = {
+        payloadType: envelope.payloadType,
+        payload: envelope.payload.toString('base64'),
+        signatures: [
+            { sig: envelope.signatures[0].sig.toString('base64'), publicKey },
+        ],
+    };
+    // If the keyid is an empty string, Rekor seems to remove it altogether.
+    if (envelope.signatures[0].keyid.length > 0) {
+        dsse.signatures[0].keyid = envelope.signatures[0].keyid;
+    }
+    return util_1.crypto.hash(util_1.json.canonicalize(dsse)).toString('hex');
+}
diff --git a/node_modules/@sigstore/sign/dist/witness/tlog/index.js b/node_modules/@sigstore/sign/dist/witness/tlog/index.js
new file mode 100644
index 0000000000000..7d5487c2cb3c6
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/witness/tlog/index.js
@@ -0,0 +1,77 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.RekorWitness = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const util_1 = require("../../util");
+const client_1 = require("./client");
+const entry_1 = require("./entry");
+class RekorWitness {
+    constructor(options) {
+        this.tlog = new client_1.TLogClient(options);
+    }
+    async testify(content, publicKey) {
+        const proposedEntry = (0, entry_1.toProposedEntry)(content, publicKey);
+        const entry = await this.tlog.createEntry(proposedEntry);
+        return toTransparencyLogEntry(entry);
+    }
+}
+exports.RekorWitness = RekorWitness;
+function toTransparencyLogEntry(entry) {
+    const logID = Buffer.from(entry.logID, 'hex');
+    // Parse entry body so we can extract the kind and version.
+    const bodyJSON = util_1.encoding.base64Decode(entry.body);
+    const entryBody = JSON.parse(bodyJSON);
+    const promise = entry?.verification?.signedEntryTimestamp
+        ? inclusionPromise(entry.verification.signedEntryTimestamp)
+        : undefined;
+    const proof = entry?.verification?.inclusionProof
+        ?
inclusionProof(entry.verification.inclusionProof) + : undefined; + const tlogEntry = { + logIndex: entry.logIndex.toString(), + logId: { + keyId: logID, + }, + integratedTime: entry.integratedTime.toString(), + kindVersion: { + kind: entryBody.kind, + version: entryBody.apiVersion, + }, + inclusionPromise: promise, + inclusionProof: proof, + canonicalizedBody: Buffer.from(entry.body, 'base64'), + }; + return { + tlogEntries: [tlogEntry], + }; +} +function inclusionPromise(promise) { + return { + signedEntryTimestamp: Buffer.from(promise, 'base64'), + }; +} +function inclusionProof(proof) { + return { + logIndex: proof.logIndex.toString(), + treeSize: proof.treeSize.toString(), + rootHash: Buffer.from(proof.rootHash, 'hex'), + hashes: proof.hashes.map((h) => Buffer.from(h, 'hex')), + checkpoint: { + envelope: proof.checkpoint, + }, + }; +} diff --git a/node_modules/@sigstore/sign/dist/witness/tsa/client.js b/node_modules/@sigstore/sign/dist/witness/tsa/client.js new file mode 100644 index 0000000000000..d2a7610401c4e --- /dev/null +++ b/node_modules/@sigstore/sign/dist/witness/tsa/client.js @@ -0,0 +1,47 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TSAClient = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const error_1 = require("../../error"); +const tsa_1 = require("../../external/tsa"); +const util_1 = require("../../util"); +class TSAClient { + constructor(options) { + this.tsa = new tsa_1.TimestampAuthority({ + baseURL: options.tsaBaseURL, + retry: options.retry, + timeout: options.timeout, + }); + } + async createTimestamp(signature) { + const request = { + artifactHash: util_1.crypto.hash(signature).toString('base64'), + hashAlgorithm: 'sha256', + }; + try { + return await this.tsa.createTimestamp(request); + } + catch (err) { + throw new error_1.InternalError({ + code: 'TSA_CREATE_TIMESTAMP_ERROR', + message: 'error creating timestamp', + cause: err, + }); + } + } +} +exports.TSAClient = TSAClient; diff --git a/node_modules/@sigstore/sign/dist/witness/tsa/index.js b/node_modules/@sigstore/sign/dist/witness/tsa/index.js new file mode 100644 index 0000000000000..d4f5c7c859d10 --- /dev/null +++ b/node_modules/@sigstore/sign/dist/witness/tsa/index.js @@ -0,0 +1,44 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TSAWitness = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +const client_1 = require("./client"); +class TSAWitness { + constructor(options) { + this.tsa = new client_1.TSAClient({ + tsaBaseURL: options.tsaBaseURL, + retry: options.retry, + timeout: options.timeout, + }); + } + async testify(content) { + const signature = extractSignature(content); + const timestamp = await this.tsa.createTimestamp(signature); + return { + rfc3161Timestamps: [{ signedTimestamp: timestamp }], + }; + } +} +exports.TSAWitness = TSAWitness; +function extractSignature(content) { + switch (content.$case) { + case 'dsseEnvelope': + return content.dsseEnvelope.signatures[0].sig; + case 'messageSignature': + return content.messageSignature.signature; + } +} diff --git a/node_modules/@sigstore/sign/dist/witness/witness.js b/node_modules/@sigstore/sign/dist/witness/witness.js new file mode 100644 index 0000000000000..c8ad2e549bdc6 --- /dev/null +++ b/node_modules/@sigstore/sign/dist/witness/witness.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/LICENSE b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/LICENSE new file mode 100644 index 0000000000000..e9e7c1679a09d --- /dev/null +++ b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2023 The Sigstore Authors + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js new file mode 100644 index 0000000000000..0c367a8384454 --- /dev/null +++ b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js @@ -0,0 +1,89 @@ +"use strict"; +/* eslint-disable */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Signature = exports.Envelope = void 0; +function createBaseEnvelope() { + return { payload: Buffer.alloc(0), payloadType: "", signatures: [] }; +} +exports.Envelope = { + fromJSON(object) { + return { + payload: isSet(object.payload) ? Buffer.from(bytesFromBase64(object.payload)) : Buffer.alloc(0), + payloadType: isSet(object.payloadType) ? String(object.payloadType) : "", + signatures: Array.isArray(object?.signatures) ? 
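+/*
+[editor's note] This generated codec round-trips DSSE envelopes between
+their JSON form (base64-encoded payload and signatures) and Buffers. A
+minimal sketch, assuming the root of @sigstore/protobuf-specs re-exports
+the generated Envelope:
+
+  const { Envelope } = require('@sigstore/protobuf-specs');
+
+  const env = Envelope.fromJSON({
+    payload: Buffer.from('{"hello":"world"}').toString('base64'),
+    payloadType: 'application/vnd.in-toto+json',
+    signatures: [{ sig: Buffer.from('sig-bytes').toString('base64'), keyid: 'key-1' }],
+  });
+  // env.payload and env.signatures[0].sig are now Buffers;
+  // Envelope.toJSON(env) re-encodes them back to base64 strings.
+*/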
object.signatures.map((e) => exports.Signature.fromJSON(e)) : [], + }; + }, + toJSON(message) { + const obj = {}; + message.payload !== undefined && + (obj.payload = base64FromBytes(message.payload !== undefined ? message.payload : Buffer.alloc(0))); + message.payloadType !== undefined && (obj.payloadType = message.payloadType); + if (message.signatures) { + obj.signatures = message.signatures.map((e) => e ? exports.Signature.toJSON(e) : undefined); + } + else { + obj.signatures = []; + } + return obj; + }, +}; +function createBaseSignature() { + return { sig: Buffer.alloc(0), keyid: "" }; +} +exports.Signature = { + fromJSON(object) { + return { + sig: isSet(object.sig) ? Buffer.from(bytesFromBase64(object.sig)) : Buffer.alloc(0), + keyid: isSet(object.keyid) ? String(object.keyid) : "", + }; + }, + toJSON(message) { + const obj = {}; + message.sig !== undefined && (obj.sig = base64FromBytes(message.sig !== undefined ? message.sig : Buffer.alloc(0))); + message.keyid !== undefined && (obj.keyid = message.keyid); + return obj; + }, +}; +var tsProtoGlobalThis = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); +function bytesFromBase64(b64) { + if (tsProtoGlobalThis.Buffer) { + return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64")); + } + else { + const bin = tsProtoGlobalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} +function base64FromBytes(arr) { + if (tsProtoGlobalThis.Buffer) { + return tsProtoGlobalThis.Buffer.from(arr).toString("base64"); + } + else { + const bin = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return tsProtoGlobalThis.btoa(bin.join("")); + } +} +function isSet(value) { + return value !== null && value !== undefined; +} diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js new file mode 100644 index 0000000000000..073093b8371a8 --- /dev/null +++ b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js @@ -0,0 +1,185 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.CloudEventBatch = exports.CloudEvent_CloudEventAttributeValue = exports.CloudEvent_AttributesEntry = exports.CloudEvent = void 0; +/* eslint-disable */ +const any_1 = require("./google/protobuf/any"); +const timestamp_1 = require("./google/protobuf/timestamp"); +function createBaseCloudEvent() { + return { id: "", source: "", specVersion: "", type: "", attributes: {}, data: undefined }; +} +exports.CloudEvent = { + fromJSON(object) { + return { + id: isSet(object.id) ? String(object.id) : "", + source: isSet(object.source) ? String(object.source) : "", + specVersion: isSet(object.specVersion) ? String(object.specVersion) : "", + type: isSet(object.type) ? String(object.type) : "", + attributes: isObject(object.attributes) + ? Object.entries(object.attributes).reduce((acc, [key, value]) => { + acc[key] = exports.CloudEvent_CloudEventAttributeValue.fromJSON(value); + return acc; + }, {}) + : {}, + data: isSet(object.binaryData) + ? 
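+/*
+[editor's note] ts-proto models a protobuf `oneof` as a tagged union
+discriminated by `$case`, so exactly one variant of `data` is materialized.
+An illustrative sketch with made-up event values:
+
+  const event = CloudEvent.fromJSON({
+    id: '1',
+    source: '/demo',
+    specVersion: '1.0',
+    type: 'dev.example.ping',
+    textData: 'hello',
+  });
+  // event.data => { $case: 'textData', textData: 'hello' }
+  // toJSON() flattens it back to a top-level textData property.
+*/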
{ $case: "binaryData", binaryData: Buffer.from(bytesFromBase64(object.binaryData)) } + : isSet(object.textData) + ? { $case: "textData", textData: String(object.textData) } + : isSet(object.protoData) + ? { $case: "protoData", protoData: any_1.Any.fromJSON(object.protoData) } + : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.id !== undefined && (obj.id = message.id); + message.source !== undefined && (obj.source = message.source); + message.specVersion !== undefined && (obj.specVersion = message.specVersion); + message.type !== undefined && (obj.type = message.type); + obj.attributes = {}; + if (message.attributes) { + Object.entries(message.attributes).forEach(([k, v]) => { + obj.attributes[k] = exports.CloudEvent_CloudEventAttributeValue.toJSON(v); + }); + } + message.data?.$case === "binaryData" && + (obj.binaryData = message.data?.binaryData !== undefined ? base64FromBytes(message.data?.binaryData) : undefined); + message.data?.$case === "textData" && (obj.textData = message.data?.textData); + message.data?.$case === "protoData" && + (obj.protoData = message.data?.protoData ? any_1.Any.toJSON(message.data?.protoData) : undefined); + return obj; + }, +}; +function createBaseCloudEvent_AttributesEntry() { + return { key: "", value: undefined }; +} +exports.CloudEvent_AttributesEntry = { + fromJSON(object) { + return { + key: isSet(object.key) ? String(object.key) : "", + value: isSet(object.value) ? exports.CloudEvent_CloudEventAttributeValue.fromJSON(object.value) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.key !== undefined && (obj.key = message.key); + message.value !== undefined && + (obj.value = message.value ? exports.CloudEvent_CloudEventAttributeValue.toJSON(message.value) : undefined); + return obj; + }, +}; +function createBaseCloudEvent_CloudEventAttributeValue() { + return { attr: undefined }; +} +exports.CloudEvent_CloudEventAttributeValue = { + fromJSON(object) { + return { + attr: isSet(object.ceBoolean) + ? { $case: "ceBoolean", ceBoolean: Boolean(object.ceBoolean) } + : isSet(object.ceInteger) + ? { $case: "ceInteger", ceInteger: Number(object.ceInteger) } + : isSet(object.ceString) + ? { $case: "ceString", ceString: String(object.ceString) } + : isSet(object.ceBytes) + ? { $case: "ceBytes", ceBytes: Buffer.from(bytesFromBase64(object.ceBytes)) } + : isSet(object.ceUri) + ? { $case: "ceUri", ceUri: String(object.ceUri) } + : isSet(object.ceUriRef) + ? { $case: "ceUriRef", ceUriRef: String(object.ceUriRef) } + : isSet(object.ceTimestamp) + ? { $case: "ceTimestamp", ceTimestamp: fromJsonTimestamp(object.ceTimestamp) } + : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.attr?.$case === "ceBoolean" && (obj.ceBoolean = message.attr?.ceBoolean); + message.attr?.$case === "ceInteger" && (obj.ceInteger = Math.round(message.attr?.ceInteger)); + message.attr?.$case === "ceString" && (obj.ceString = message.attr?.ceString); + message.attr?.$case === "ceBytes" && + (obj.ceBytes = message.attr?.ceBytes !== undefined ? base64FromBytes(message.attr?.ceBytes) : undefined); + message.attr?.$case === "ceUri" && (obj.ceUri = message.attr?.ceUri); + message.attr?.$case === "ceUriRef" && (obj.ceUriRef = message.attr?.ceUriRef); + message.attr?.$case === "ceTimestamp" && (obj.ceTimestamp = message.attr?.ceTimestamp.toISOString()); + return obj; + }, +}; +function createBaseCloudEventBatch() { + return { events: [] }; +} +exports.CloudEventBatch = { + fromJSON(object) { + return { events: Array.isArray(object?.events) ? 
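+/*
+[editor's note] CloudEventBatch is a thin wrapper over a repeated CloudEvent
+field. The helpers that follow (tsProtoGlobalThis, bytesFromBase64,
+base64FromBytes) are the generated runtime shim: they locate the global
+object across Node, browsers, and workers, and use Buffer for base64 when
+available, falling back to atob/btoa otherwise. For example,
+base64FromBytes(new Uint8Array([104, 105])) yields 'aGk=' on either path.
+*/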
object.events.map((e) => exports.CloudEvent.fromJSON(e)) : [] }; + }, + toJSON(message) { + const obj = {}; + if (message.events) { + obj.events = message.events.map((e) => e ? exports.CloudEvent.toJSON(e) : undefined); + } + else { + obj.events = []; + } + return obj; + }, +}; +var tsProtoGlobalThis = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); +function bytesFromBase64(b64) { + if (tsProtoGlobalThis.Buffer) { + return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64")); + } + else { + const bin = tsProtoGlobalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} +function base64FromBytes(arr) { + if (tsProtoGlobalThis.Buffer) { + return tsProtoGlobalThis.Buffer.from(arr).toString("base64"); + } + else { + const bin = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return tsProtoGlobalThis.btoa(bin.join("")); + } +} +function fromTimestamp(t) { + let millis = Number(t.seconds) * 1000; + millis += t.nanos / 1000000; + return new Date(millis); +} +function fromJsonTimestamp(o) { + if (o instanceof Date) { + return o; + } + else if (typeof o === "string") { + return new Date(o); + } + else { + return fromTimestamp(timestamp_1.Timestamp.fromJSON(o)); + } +} +function isObject(value) { + return typeof value === "object" && value !== null; +} +function isSet(value) { + return value !== null && value !== undefined; +} diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js new file mode 100644 index 0000000000000..da627499ad765 --- /dev/null +++ b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js @@ -0,0 +1,119 @@ +"use strict"; +/* eslint-disable */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.fieldBehaviorToJSON = exports.fieldBehaviorFromJSON = exports.FieldBehavior = void 0; +/** + * An indicator of the behavior of a given field (for example, that a field + * is required in requests, or given as output but ignored as input). + * This **does not** change the behavior in protocol buffers itself; it only + * denotes the behavior and may affect how API tooling handles the field. + * + * Note: This enum **may** receive new values in the future. + */ +var FieldBehavior; +(function (FieldBehavior) { + /** FIELD_BEHAVIOR_UNSPECIFIED - Conventional default for enums. Do not use this. */ + FieldBehavior[FieldBehavior["FIELD_BEHAVIOR_UNSPECIFIED"] = 0] = "FIELD_BEHAVIOR_UNSPECIFIED"; + /** + * OPTIONAL - Specifically denotes a field as optional. + * While all fields in protocol buffers are optional, this may be specified + * for emphasis if appropriate. + */ + FieldBehavior[FieldBehavior["OPTIONAL"] = 1] = "OPTIONAL"; + /** + * REQUIRED - Denotes a field as required. + * This indicates that the field **must** be provided as part of the request, + * and failure to do so will cause an error (usually `INVALID_ARGUMENT`). + */ + FieldBehavior[FieldBehavior["REQUIRED"] = 2] = "REQUIRED"; + /** + * OUTPUT_ONLY - Denotes a field as output only. 
+ * This indicates that the field is provided in responses, but including the + * field in a request does nothing (the server *must* ignore it and + * *must not* throw an error as a result of the field's presence). + */ + FieldBehavior[FieldBehavior["OUTPUT_ONLY"] = 3] = "OUTPUT_ONLY"; + /** + * INPUT_ONLY - Denotes a field as input only. + * This indicates that the field is provided in requests, and the + * corresponding field is not included in output. + */ + FieldBehavior[FieldBehavior["INPUT_ONLY"] = 4] = "INPUT_ONLY"; + /** + * IMMUTABLE - Denotes a field as immutable. + * This indicates that the field may be set once in a request to create a + * resource, but may not be changed thereafter. + */ + FieldBehavior[FieldBehavior["IMMUTABLE"] = 5] = "IMMUTABLE"; + /** + * UNORDERED_LIST - Denotes that a (repeated) field is an unordered list. + * This indicates that the service may provide the elements of the list + * in any arbitrary order, rather than the order the user originally + * provided. Additionally, the list's order may or may not be stable. + */ + FieldBehavior[FieldBehavior["UNORDERED_LIST"] = 6] = "UNORDERED_LIST"; +})(FieldBehavior = exports.FieldBehavior || (exports.FieldBehavior = {})); +function fieldBehaviorFromJSON(object) { + switch (object) { + case 0: + case "FIELD_BEHAVIOR_UNSPECIFIED": + return FieldBehavior.FIELD_BEHAVIOR_UNSPECIFIED; + case 1: + case "OPTIONAL": + return FieldBehavior.OPTIONAL; + case 2: + case "REQUIRED": + return FieldBehavior.REQUIRED; + case 3: + case "OUTPUT_ONLY": + return FieldBehavior.OUTPUT_ONLY; + case 4: + case "INPUT_ONLY": + return FieldBehavior.INPUT_ONLY; + case 5: + case "IMMUTABLE": + return FieldBehavior.IMMUTABLE; + case 6: + case "UNORDERED_LIST": + return FieldBehavior.UNORDERED_LIST; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior"); + } +} +exports.fieldBehaviorFromJSON = fieldBehaviorFromJSON; +function fieldBehaviorToJSON(object) { + switch (object) { + case FieldBehavior.FIELD_BEHAVIOR_UNSPECIFIED: + return "FIELD_BEHAVIOR_UNSPECIFIED"; + case FieldBehavior.OPTIONAL: + return "OPTIONAL"; + case FieldBehavior.REQUIRED: + return "REQUIRED"; + case FieldBehavior.OUTPUT_ONLY: + return "OUTPUT_ONLY"; + case FieldBehavior.INPUT_ONLY: + return "INPUT_ONLY"; + case FieldBehavior.IMMUTABLE: + return "IMMUTABLE"; + case FieldBehavior.UNORDERED_LIST: + return "UNORDERED_LIST"; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior"); + } +} +exports.fieldBehaviorToJSON = fieldBehaviorToJSON; +var tsProtoGlobalThis = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js new file mode 100644 index 0000000000000..6b3f3c97a6647 --- /dev/null +++ b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js @@ -0,0 +1,65 @@ +"use strict"; +/* eslint-disable */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Any = void 0; +function createBaseAny() { + return { typeUrl: "", value: 
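+/*
+[editor's note] google.protobuf.Any pairs a typeUrl identifying a message
+type with that message's serialized bytes; in the JSON form the bytes are
+base64-encoded. A small sketch (payloadBytes is a hypothetical Buffer):
+
+  const any = Any.fromJSON({
+    typeUrl: 'type.googleapis.com/google.protobuf.Timestamp',
+    value: payloadBytes.toString('base64'),
+  });
+  // any.value is a Buffer; Any.toJSON(any) re-encodes it to base64.
+*/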
Buffer.alloc(0) }; +} +exports.Any = { + fromJSON(object) { + return { + typeUrl: isSet(object.typeUrl) ? String(object.typeUrl) : "", + value: isSet(object.value) ? Buffer.from(bytesFromBase64(object.value)) : Buffer.alloc(0), + }; + }, + toJSON(message) { + const obj = {}; + message.typeUrl !== undefined && (obj.typeUrl = message.typeUrl); + message.value !== undefined && + (obj.value = base64FromBytes(message.value !== undefined ? message.value : Buffer.alloc(0))); + return obj; + }, +}; +var tsProtoGlobalThis = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); +function bytesFromBase64(b64) { + if (tsProtoGlobalThis.Buffer) { + return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64")); + } + else { + const bin = tsProtoGlobalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} +function base64FromBytes(arr) { + if (tsProtoGlobalThis.Buffer) { + return tsProtoGlobalThis.Buffer.from(arr).toString("base64"); + } + else { + const bin = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return tsProtoGlobalThis.btoa(bin.join("")); + } +} +function isSet(value) { + return value !== null && value !== undefined; +} diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js new file mode 100644 index 0000000000000..d429aac846043 --- /dev/null +++ b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js @@ -0,0 +1,1308 @@ +"use strict"; +/* eslint-disable */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.GeneratedCodeInfo_Annotation = exports.GeneratedCodeInfo = exports.SourceCodeInfo_Location = exports.SourceCodeInfo = exports.UninterpretedOption_NamePart = exports.UninterpretedOption = exports.MethodOptions = exports.ServiceOptions = exports.EnumValueOptions = exports.EnumOptions = exports.OneofOptions = exports.FieldOptions = exports.MessageOptions = exports.FileOptions = exports.MethodDescriptorProto = exports.ServiceDescriptorProto = exports.EnumValueDescriptorProto = exports.EnumDescriptorProto_EnumReservedRange = exports.EnumDescriptorProto = exports.OneofDescriptorProto = exports.FieldDescriptorProto = exports.ExtensionRangeOptions = exports.DescriptorProto_ReservedRange = exports.DescriptorProto_ExtensionRange = exports.DescriptorProto = exports.FileDescriptorProto = exports.FileDescriptorSet = exports.methodOptions_IdempotencyLevelToJSON = exports.methodOptions_IdempotencyLevelFromJSON = exports.MethodOptions_IdempotencyLevel = exports.fieldOptions_JSTypeToJSON = exports.fieldOptions_JSTypeFromJSON = exports.FieldOptions_JSType = exports.fieldOptions_CTypeToJSON = exports.fieldOptions_CTypeFromJSON = exports.FieldOptions_CType = exports.fileOptions_OptimizeModeToJSON = exports.fileOptions_OptimizeModeFromJSON = exports.FileOptions_OptimizeMode = exports.fieldDescriptorProto_LabelToJSON = exports.fieldDescriptorProto_LabelFromJSON = exports.FieldDescriptorProto_Label = exports.fieldDescriptorProto_TypeToJSON = 
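+/*
+[editor's note] This module is the generated JSON codec for
+google/protobuf/descriptor.proto; the enums and FromJSON/ToJSON pairs below
+mirror protoc's descriptor model. The enum converters accept either the
+string or the numeric wire form and throw on anything unrecognized, e.g.:
+
+  fieldDescriptorProto_TypeFromJSON('TYPE_STRING');  // => 9 (TYPE_STRING)
+  fieldDescriptorProto_TypeFromJSON(9);              // => 9 as well
+  fieldDescriptorProto_TypeToJSON(9);                // => 'TYPE_STRING'
+*/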
exports.fieldDescriptorProto_TypeFromJSON = exports.FieldDescriptorProto_Type = void 0; +var FieldDescriptorProto_Type; +(function (FieldDescriptorProto_Type) { + /** + * TYPE_DOUBLE - 0 is reserved for errors. + * Order is weird for historical reasons. + */ + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_DOUBLE"] = 1] = "TYPE_DOUBLE"; + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FLOAT"] = 2] = "TYPE_FLOAT"; + /** + * TYPE_INT64 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if + * negative values are likely. + */ + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_INT64"] = 3] = "TYPE_INT64"; + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_UINT64"] = 4] = "TYPE_UINT64"; + /** + * TYPE_INT32 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if + * negative values are likely. + */ + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_INT32"] = 5] = "TYPE_INT32"; + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FIXED64"] = 6] = "TYPE_FIXED64"; + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FIXED32"] = 7] = "TYPE_FIXED32"; + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_BOOL"] = 8] = "TYPE_BOOL"; + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_STRING"] = 9] = "TYPE_STRING"; + /** + * TYPE_GROUP - Tag-delimited aggregate. + * Group type is deprecated and not supported in proto3. However, Proto3 + * implementations should still be able to parse the group wire format and + * treat group fields as unknown fields. + */ + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_GROUP"] = 10] = "TYPE_GROUP"; + /** TYPE_MESSAGE - Length-delimited aggregate. */ + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_MESSAGE"] = 11] = "TYPE_MESSAGE"; + /** TYPE_BYTES - New in version 2. */ + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_BYTES"] = 12] = "TYPE_BYTES"; + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_UINT32"] = 13] = "TYPE_UINT32"; + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_ENUM"] = 14] = "TYPE_ENUM"; + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SFIXED32"] = 15] = "TYPE_SFIXED32"; + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SFIXED64"] = 16] = "TYPE_SFIXED64"; + /** TYPE_SINT32 - Uses ZigZag encoding. */ + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SINT32"] = 17] = "TYPE_SINT32"; + /** TYPE_SINT64 - Uses ZigZag encoding. 
*/ + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SINT64"] = 18] = "TYPE_SINT64"; +})(FieldDescriptorProto_Type = exports.FieldDescriptorProto_Type || (exports.FieldDescriptorProto_Type = {})); +function fieldDescriptorProto_TypeFromJSON(object) { + switch (object) { + case 1: + case "TYPE_DOUBLE": + return FieldDescriptorProto_Type.TYPE_DOUBLE; + case 2: + case "TYPE_FLOAT": + return FieldDescriptorProto_Type.TYPE_FLOAT; + case 3: + case "TYPE_INT64": + return FieldDescriptorProto_Type.TYPE_INT64; + case 4: + case "TYPE_UINT64": + return FieldDescriptorProto_Type.TYPE_UINT64; + case 5: + case "TYPE_INT32": + return FieldDescriptorProto_Type.TYPE_INT32; + case 6: + case "TYPE_FIXED64": + return FieldDescriptorProto_Type.TYPE_FIXED64; + case 7: + case "TYPE_FIXED32": + return FieldDescriptorProto_Type.TYPE_FIXED32; + case 8: + case "TYPE_BOOL": + return FieldDescriptorProto_Type.TYPE_BOOL; + case 9: + case "TYPE_STRING": + return FieldDescriptorProto_Type.TYPE_STRING; + case 10: + case "TYPE_GROUP": + return FieldDescriptorProto_Type.TYPE_GROUP; + case 11: + case "TYPE_MESSAGE": + return FieldDescriptorProto_Type.TYPE_MESSAGE; + case 12: + case "TYPE_BYTES": + return FieldDescriptorProto_Type.TYPE_BYTES; + case 13: + case "TYPE_UINT32": + return FieldDescriptorProto_Type.TYPE_UINT32; + case 14: + case "TYPE_ENUM": + return FieldDescriptorProto_Type.TYPE_ENUM; + case 15: + case "TYPE_SFIXED32": + return FieldDescriptorProto_Type.TYPE_SFIXED32; + case 16: + case "TYPE_SFIXED64": + return FieldDescriptorProto_Type.TYPE_SFIXED64; + case 17: + case "TYPE_SINT32": + return FieldDescriptorProto_Type.TYPE_SINT32; + case 18: + case "TYPE_SINT64": + return FieldDescriptorProto_Type.TYPE_SINT64; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type"); + } +} +exports.fieldDescriptorProto_TypeFromJSON = fieldDescriptorProto_TypeFromJSON; +function fieldDescriptorProto_TypeToJSON(object) { + switch (object) { + case FieldDescriptorProto_Type.TYPE_DOUBLE: + return "TYPE_DOUBLE"; + case FieldDescriptorProto_Type.TYPE_FLOAT: + return "TYPE_FLOAT"; + case FieldDescriptorProto_Type.TYPE_INT64: + return "TYPE_INT64"; + case FieldDescriptorProto_Type.TYPE_UINT64: + return "TYPE_UINT64"; + case FieldDescriptorProto_Type.TYPE_INT32: + return "TYPE_INT32"; + case FieldDescriptorProto_Type.TYPE_FIXED64: + return "TYPE_FIXED64"; + case FieldDescriptorProto_Type.TYPE_FIXED32: + return "TYPE_FIXED32"; + case FieldDescriptorProto_Type.TYPE_BOOL: + return "TYPE_BOOL"; + case FieldDescriptorProto_Type.TYPE_STRING: + return "TYPE_STRING"; + case FieldDescriptorProto_Type.TYPE_GROUP: + return "TYPE_GROUP"; + case FieldDescriptorProto_Type.TYPE_MESSAGE: + return "TYPE_MESSAGE"; + case FieldDescriptorProto_Type.TYPE_BYTES: + return "TYPE_BYTES"; + case FieldDescriptorProto_Type.TYPE_UINT32: + return "TYPE_UINT32"; + case FieldDescriptorProto_Type.TYPE_ENUM: + return "TYPE_ENUM"; + case FieldDescriptorProto_Type.TYPE_SFIXED32: + return "TYPE_SFIXED32"; + case FieldDescriptorProto_Type.TYPE_SFIXED64: + return "TYPE_SFIXED64"; + case FieldDescriptorProto_Type.TYPE_SINT32: + return "TYPE_SINT32"; + case FieldDescriptorProto_Type.TYPE_SINT64: + return "TYPE_SINT64"; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type"); + } +} +exports.fieldDescriptorProto_TypeToJSON = fieldDescriptorProto_TypeToJSON; +var FieldDescriptorProto_Label; +(function (FieldDescriptorProto_Label) { + /** 
LABEL_OPTIONAL - 0 is reserved for errors */ + FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_OPTIONAL"] = 1] = "LABEL_OPTIONAL"; + FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_REQUIRED"] = 2] = "LABEL_REQUIRED"; + FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_REPEATED"] = 3] = "LABEL_REPEATED"; +})(FieldDescriptorProto_Label = exports.FieldDescriptorProto_Label || (exports.FieldDescriptorProto_Label = {})); +function fieldDescriptorProto_LabelFromJSON(object) { + switch (object) { + case 1: + case "LABEL_OPTIONAL": + return FieldDescriptorProto_Label.LABEL_OPTIONAL; + case 2: + case "LABEL_REQUIRED": + return FieldDescriptorProto_Label.LABEL_REQUIRED; + case 3: + case "LABEL_REPEATED": + return FieldDescriptorProto_Label.LABEL_REPEATED; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label"); + } +} +exports.fieldDescriptorProto_LabelFromJSON = fieldDescriptorProto_LabelFromJSON; +function fieldDescriptorProto_LabelToJSON(object) { + switch (object) { + case FieldDescriptorProto_Label.LABEL_OPTIONAL: + return "LABEL_OPTIONAL"; + case FieldDescriptorProto_Label.LABEL_REQUIRED: + return "LABEL_REQUIRED"; + case FieldDescriptorProto_Label.LABEL_REPEATED: + return "LABEL_REPEATED"; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label"); + } +} +exports.fieldDescriptorProto_LabelToJSON = fieldDescriptorProto_LabelToJSON; +/** Generated classes can be optimized for speed or code size. */ +var FileOptions_OptimizeMode; +(function (FileOptions_OptimizeMode) { + /** SPEED - Generate complete code for parsing, serialization, */ + FileOptions_OptimizeMode[FileOptions_OptimizeMode["SPEED"] = 1] = "SPEED"; + /** CODE_SIZE - etc. */ + FileOptions_OptimizeMode[FileOptions_OptimizeMode["CODE_SIZE"] = 2] = "CODE_SIZE"; + /** LITE_RUNTIME - Generate code using MessageLite and the lite runtime. */ + FileOptions_OptimizeMode[FileOptions_OptimizeMode["LITE_RUNTIME"] = 3] = "LITE_RUNTIME"; +})(FileOptions_OptimizeMode = exports.FileOptions_OptimizeMode || (exports.FileOptions_OptimizeMode = {})); +function fileOptions_OptimizeModeFromJSON(object) { + switch (object) { + case 1: + case "SPEED": + return FileOptions_OptimizeMode.SPEED; + case 2: + case "CODE_SIZE": + return FileOptions_OptimizeMode.CODE_SIZE; + case 3: + case "LITE_RUNTIME": + return FileOptions_OptimizeMode.LITE_RUNTIME; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode"); + } +} +exports.fileOptions_OptimizeModeFromJSON = fileOptions_OptimizeModeFromJSON; +function fileOptions_OptimizeModeToJSON(object) { + switch (object) { + case FileOptions_OptimizeMode.SPEED: + return "SPEED"; + case FileOptions_OptimizeMode.CODE_SIZE: + return "CODE_SIZE"; + case FileOptions_OptimizeMode.LITE_RUNTIME: + return "LITE_RUNTIME"; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode"); + } +} +exports.fileOptions_OptimizeModeToJSON = fileOptions_OptimizeModeToJSON; +var FieldOptions_CType; +(function (FieldOptions_CType) { + /** STRING - Default mode. 
*/ + FieldOptions_CType[FieldOptions_CType["STRING"] = 0] = "STRING"; + FieldOptions_CType[FieldOptions_CType["CORD"] = 1] = "CORD"; + FieldOptions_CType[FieldOptions_CType["STRING_PIECE"] = 2] = "STRING_PIECE"; +})(FieldOptions_CType = exports.FieldOptions_CType || (exports.FieldOptions_CType = {})); +function fieldOptions_CTypeFromJSON(object) { + switch (object) { + case 0: + case "STRING": + return FieldOptions_CType.STRING; + case 1: + case "CORD": + return FieldOptions_CType.CORD; + case 2: + case "STRING_PIECE": + return FieldOptions_CType.STRING_PIECE; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType"); + } +} +exports.fieldOptions_CTypeFromJSON = fieldOptions_CTypeFromJSON; +function fieldOptions_CTypeToJSON(object) { + switch (object) { + case FieldOptions_CType.STRING: + return "STRING"; + case FieldOptions_CType.CORD: + return "CORD"; + case FieldOptions_CType.STRING_PIECE: + return "STRING_PIECE"; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType"); + } +} +exports.fieldOptions_CTypeToJSON = fieldOptions_CTypeToJSON; +var FieldOptions_JSType; +(function (FieldOptions_JSType) { + /** JS_NORMAL - Use the default type. */ + FieldOptions_JSType[FieldOptions_JSType["JS_NORMAL"] = 0] = "JS_NORMAL"; + /** JS_STRING - Use JavaScript strings. */ + FieldOptions_JSType[FieldOptions_JSType["JS_STRING"] = 1] = "JS_STRING"; + /** JS_NUMBER - Use JavaScript numbers. */ + FieldOptions_JSType[FieldOptions_JSType["JS_NUMBER"] = 2] = "JS_NUMBER"; +})(FieldOptions_JSType = exports.FieldOptions_JSType || (exports.FieldOptions_JSType = {})); +function fieldOptions_JSTypeFromJSON(object) { + switch (object) { + case 0: + case "JS_NORMAL": + return FieldOptions_JSType.JS_NORMAL; + case 1: + case "JS_STRING": + return FieldOptions_JSType.JS_STRING; + case 2: + case "JS_NUMBER": + return FieldOptions_JSType.JS_NUMBER; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType"); + } +} +exports.fieldOptions_JSTypeFromJSON = fieldOptions_JSTypeFromJSON; +function fieldOptions_JSTypeToJSON(object) { + switch (object) { + case FieldOptions_JSType.JS_NORMAL: + return "JS_NORMAL"; + case FieldOptions_JSType.JS_STRING: + return "JS_STRING"; + case FieldOptions_JSType.JS_NUMBER: + return "JS_NUMBER"; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType"); + } +} +exports.fieldOptions_JSTypeToJSON = fieldOptions_JSTypeToJSON; +/** + * Is this method side-effect-free (or safe in HTTP parlance), or idempotent, + * or neither? HTTP based RPC implementation may choose GET verb for safe + * methods, and PUT verb for idempotent methods instead of the default POST. 
+ */ +var MethodOptions_IdempotencyLevel; +(function (MethodOptions_IdempotencyLevel) { + MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["IDEMPOTENCY_UNKNOWN"] = 0] = "IDEMPOTENCY_UNKNOWN"; + /** NO_SIDE_EFFECTS - implies idempotent */ + MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["NO_SIDE_EFFECTS"] = 1] = "NO_SIDE_EFFECTS"; + /** IDEMPOTENT - idempotent, but may have side effects */ + MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["IDEMPOTENT"] = 2] = "IDEMPOTENT"; +})(MethodOptions_IdempotencyLevel = exports.MethodOptions_IdempotencyLevel || (exports.MethodOptions_IdempotencyLevel = {})); +function methodOptions_IdempotencyLevelFromJSON(object) { + switch (object) { + case 0: + case "IDEMPOTENCY_UNKNOWN": + return MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN; + case 1: + case "NO_SIDE_EFFECTS": + return MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS; + case 2: + case "IDEMPOTENT": + return MethodOptions_IdempotencyLevel.IDEMPOTENT; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel"); + } +} +exports.methodOptions_IdempotencyLevelFromJSON = methodOptions_IdempotencyLevelFromJSON; +function methodOptions_IdempotencyLevelToJSON(object) { + switch (object) { + case MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN: + return "IDEMPOTENCY_UNKNOWN"; + case MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS: + return "NO_SIDE_EFFECTS"; + case MethodOptions_IdempotencyLevel.IDEMPOTENT: + return "IDEMPOTENT"; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel"); + } +} +exports.methodOptions_IdempotencyLevelToJSON = methodOptions_IdempotencyLevelToJSON; +function createBaseFileDescriptorSet() { + return { file: [] }; +} +exports.FileDescriptorSet = { + fromJSON(object) { + return { file: Array.isArray(object?.file) ? object.file.map((e) => exports.FileDescriptorProto.fromJSON(e)) : [] }; + }, + toJSON(message) { + const obj = {}; + if (message.file) { + obj.file = message.file.map((e) => e ? exports.FileDescriptorProto.toJSON(e) : undefined); + } + else { + obj.file = []; + } + return obj; + }, +}; +function createBaseFileDescriptorProto() { + return { + name: "", + package: "", + dependency: [], + publicDependency: [], + weakDependency: [], + messageType: [], + enumType: [], + service: [], + extension: [], + options: undefined, + sourceCodeInfo: undefined, + syntax: "", + }; +} +exports.FileDescriptorProto = { + fromJSON(object) { + return { + name: isSet(object.name) ? String(object.name) : "", + package: isSet(object.package) ? String(object.package) : "", + dependency: Array.isArray(object?.dependency) ? object.dependency.map((e) => String(e)) : [], + publicDependency: Array.isArray(object?.publicDependency) + ? object.publicDependency.map((e) => Number(e)) + : [], + weakDependency: Array.isArray(object?.weakDependency) ? object.weakDependency.map((e) => Number(e)) : [], + messageType: Array.isArray(object?.messageType) + ? object.messageType.map((e) => exports.DescriptorProto.fromJSON(e)) + : [], + enumType: Array.isArray(object?.enumType) ? object.enumType.map((e) => exports.EnumDescriptorProto.fromJSON(e)) : [], + service: Array.isArray(object?.service) ? object.service.map((e) => exports.ServiceDescriptorProto.fromJSON(e)) : [], + extension: Array.isArray(object?.extension) + ? object.extension.map((e) => exports.FieldDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) ? 
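+/*
+[editor's note] FileDescriptorProto.fromJSON can hydrate descriptor JSON of
+the shape protoc tooling emits; nested types recurse through the codecs
+defined below. An illustrative sketch with a made-up descriptor:
+
+  const fd = FileDescriptorProto.fromJSON({
+    name: 'demo.proto',
+    package: 'demo',
+    syntax: 'proto3',
+    messageType: [{ name: 'Ping' }],
+  });
+  // fd.messageType[0] is a DescriptorProto whose unset fields fall back to
+  // their defaults (empty strings, empty arrays, undefined options).
+*/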
exports.FileOptions.fromJSON(object.options) : undefined, + sourceCodeInfo: isSet(object.sourceCodeInfo) ? exports.SourceCodeInfo.fromJSON(object.sourceCodeInfo) : undefined, + syntax: isSet(object.syntax) ? String(object.syntax) : "", + }; + }, + toJSON(message) { + const obj = {}; + message.name !== undefined && (obj.name = message.name); + message.package !== undefined && (obj.package = message.package); + if (message.dependency) { + obj.dependency = message.dependency.map((e) => e); + } + else { + obj.dependency = []; + } + if (message.publicDependency) { + obj.publicDependency = message.publicDependency.map((e) => Math.round(e)); + } + else { + obj.publicDependency = []; + } + if (message.weakDependency) { + obj.weakDependency = message.weakDependency.map((e) => Math.round(e)); + } + else { + obj.weakDependency = []; + } + if (message.messageType) { + obj.messageType = message.messageType.map((e) => e ? exports.DescriptorProto.toJSON(e) : undefined); + } + else { + obj.messageType = []; + } + if (message.enumType) { + obj.enumType = message.enumType.map((e) => e ? exports.EnumDescriptorProto.toJSON(e) : undefined); + } + else { + obj.enumType = []; + } + if (message.service) { + obj.service = message.service.map((e) => e ? exports.ServiceDescriptorProto.toJSON(e) : undefined); + } + else { + obj.service = []; + } + if (message.extension) { + obj.extension = message.extension.map((e) => e ? exports.FieldDescriptorProto.toJSON(e) : undefined); + } + else { + obj.extension = []; + } + message.options !== undefined && (obj.options = message.options ? exports.FileOptions.toJSON(message.options) : undefined); + message.sourceCodeInfo !== undefined && + (obj.sourceCodeInfo = message.sourceCodeInfo ? exports.SourceCodeInfo.toJSON(message.sourceCodeInfo) : undefined); + message.syntax !== undefined && (obj.syntax = message.syntax); + return obj; + }, +}; +function createBaseDescriptorProto() { + return { + name: "", + field: [], + extension: [], + nestedType: [], + enumType: [], + extensionRange: [], + oneofDecl: [], + options: undefined, + reservedRange: [], + reservedName: [], + }; +} +exports.DescriptorProto = { + fromJSON(object) { + return { + name: isSet(object.name) ? String(object.name) : "", + field: Array.isArray(object?.field) ? object.field.map((e) => exports.FieldDescriptorProto.fromJSON(e)) : [], + extension: Array.isArray(object?.extension) + ? object.extension.map((e) => exports.FieldDescriptorProto.fromJSON(e)) + : [], + nestedType: Array.isArray(object?.nestedType) + ? object.nestedType.map((e) => exports.DescriptorProto.fromJSON(e)) + : [], + enumType: Array.isArray(object?.enumType) ? object.enumType.map((e) => exports.EnumDescriptorProto.fromJSON(e)) : [], + extensionRange: Array.isArray(object?.extensionRange) + ? object.extensionRange.map((e) => exports.DescriptorProto_ExtensionRange.fromJSON(e)) + : [], + oneofDecl: Array.isArray(object?.oneofDecl) + ? object.oneofDecl.map((e) => exports.OneofDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) ? exports.MessageOptions.fromJSON(object.options) : undefined, + reservedRange: Array.isArray(object?.reservedRange) + ? object.reservedRange.map((e) => exports.DescriptorProto_ReservedRange.fromJSON(e)) + : [], + reservedName: Array.isArray(object?.reservedName) ? object.reservedName.map((e) => String(e)) : [], + }; + }, + toJSON(message) { + const obj = {}; + message.name !== undefined && (obj.name = message.name); + if (message.field) { + obj.field = message.field.map((e) => e ? 
exports.FieldDescriptorProto.toJSON(e) : undefined); + } + else { + obj.field = []; + } + if (message.extension) { + obj.extension = message.extension.map((e) => e ? exports.FieldDescriptorProto.toJSON(e) : undefined); + } + else { + obj.extension = []; + } + if (message.nestedType) { + obj.nestedType = message.nestedType.map((e) => e ? exports.DescriptorProto.toJSON(e) : undefined); + } + else { + obj.nestedType = []; + } + if (message.enumType) { + obj.enumType = message.enumType.map((e) => e ? exports.EnumDescriptorProto.toJSON(e) : undefined); + } + else { + obj.enumType = []; + } + if (message.extensionRange) { + obj.extensionRange = message.extensionRange.map((e) => e ? exports.DescriptorProto_ExtensionRange.toJSON(e) : undefined); + } + else { + obj.extensionRange = []; + } + if (message.oneofDecl) { + obj.oneofDecl = message.oneofDecl.map((e) => e ? exports.OneofDescriptorProto.toJSON(e) : undefined); + } + else { + obj.oneofDecl = []; + } + message.options !== undefined && + (obj.options = message.options ? exports.MessageOptions.toJSON(message.options) : undefined); + if (message.reservedRange) { + obj.reservedRange = message.reservedRange.map((e) => e ? exports.DescriptorProto_ReservedRange.toJSON(e) : undefined); + } + else { + obj.reservedRange = []; + } + if (message.reservedName) { + obj.reservedName = message.reservedName.map((e) => e); + } + else { + obj.reservedName = []; + } + return obj; + }, +}; +function createBaseDescriptorProto_ExtensionRange() { + return { start: 0, end: 0, options: undefined }; +} +exports.DescriptorProto_ExtensionRange = { + fromJSON(object) { + return { + start: isSet(object.start) ? Number(object.start) : 0, + end: isSet(object.end) ? Number(object.end) : 0, + options: isSet(object.options) ? exports.ExtensionRangeOptions.fromJSON(object.options) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.start !== undefined && (obj.start = Math.round(message.start)); + message.end !== undefined && (obj.end = Math.round(message.end)); + message.options !== undefined && + (obj.options = message.options ? exports.ExtensionRangeOptions.toJSON(message.options) : undefined); + return obj; + }, +}; +function createBaseDescriptorProto_ReservedRange() { + return { start: 0, end: 0 }; +} +exports.DescriptorProto_ReservedRange = { + fromJSON(object) { + return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? Number(object.end) : 0 }; + }, + toJSON(message) { + const obj = {}; + message.start !== undefined && (obj.start = Math.round(message.start)); + message.end !== undefined && (obj.end = Math.round(message.end)); + return obj; + }, +}; +function createBaseExtensionRangeOptions() { + return { uninterpretedOption: [] }; +} +exports.ExtensionRangeOptions = { + fromJSON(object) { + return { + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? 
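+/*
+[editor's note] A convention that holds across these toJSON methods: integer
+fields are normalized with Math.round, bytes fields are re-encoded to
+base64, and repeated fields are always emitted (as [] when empty) so the
+output shape stays stable for consumers. For instance,
+DescriptorProto_ReservedRange.toJSON({ start: 1, end: 5 }) => { start: 1, end: 5 }.
+*/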
exports.UninterpretedOption.toJSON(e) : undefined); + } + else { + obj.uninterpretedOption = []; + } + return obj; + }, +}; +function createBaseFieldDescriptorProto() { + return { + name: "", + number: 0, + label: 1, + type: 1, + typeName: "", + extendee: "", + defaultValue: "", + oneofIndex: 0, + jsonName: "", + options: undefined, + proto3Optional: false, + }; +} +exports.FieldDescriptorProto = { + fromJSON(object) { + return { + name: isSet(object.name) ? String(object.name) : "", + number: isSet(object.number) ? Number(object.number) : 0, + label: isSet(object.label) ? fieldDescriptorProto_LabelFromJSON(object.label) : 1, + type: isSet(object.type) ? fieldDescriptorProto_TypeFromJSON(object.type) : 1, + typeName: isSet(object.typeName) ? String(object.typeName) : "", + extendee: isSet(object.extendee) ? String(object.extendee) : "", + defaultValue: isSet(object.defaultValue) ? String(object.defaultValue) : "", + oneofIndex: isSet(object.oneofIndex) ? Number(object.oneofIndex) : 0, + jsonName: isSet(object.jsonName) ? String(object.jsonName) : "", + options: isSet(object.options) ? exports.FieldOptions.fromJSON(object.options) : undefined, + proto3Optional: isSet(object.proto3Optional) ? Boolean(object.proto3Optional) : false, + }; + }, + toJSON(message) { + const obj = {}; + message.name !== undefined && (obj.name = message.name); + message.number !== undefined && (obj.number = Math.round(message.number)); + message.label !== undefined && (obj.label = fieldDescriptorProto_LabelToJSON(message.label)); + message.type !== undefined && (obj.type = fieldDescriptorProto_TypeToJSON(message.type)); + message.typeName !== undefined && (obj.typeName = message.typeName); + message.extendee !== undefined && (obj.extendee = message.extendee); + message.defaultValue !== undefined && (obj.defaultValue = message.defaultValue); + message.oneofIndex !== undefined && (obj.oneofIndex = Math.round(message.oneofIndex)); + message.jsonName !== undefined && (obj.jsonName = message.jsonName); + message.options !== undefined && (obj.options = message.options ? exports.FieldOptions.toJSON(message.options) : undefined); + message.proto3Optional !== undefined && (obj.proto3Optional = message.proto3Optional); + return obj; + }, +}; +function createBaseOneofDescriptorProto() { + return { name: "", options: undefined }; +} +exports.OneofDescriptorProto = { + fromJSON(object) { + return { + name: isSet(object.name) ? String(object.name) : "", + options: isSet(object.options) ? exports.OneofOptions.fromJSON(object.options) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.name !== undefined && (obj.name = message.name); + message.options !== undefined && (obj.options = message.options ? exports.OneofOptions.toJSON(message.options) : undefined); + return obj; + }, +}; +function createBaseEnumDescriptorProto() { + return { name: "", value: [], options: undefined, reservedRange: [], reservedName: [] }; +} +exports.EnumDescriptorProto = { + fromJSON(object) { + return { + name: isSet(object.name) ? String(object.name) : "", + value: Array.isArray(object?.value) ? object.value.map((e) => exports.EnumValueDescriptorProto.fromJSON(e)) : [], + options: isSet(object.options) ? exports.EnumOptions.fromJSON(object.options) : undefined, + reservedRange: Array.isArray(object?.reservedRange) + ? object.reservedRange.map((e) => exports.EnumDescriptorProto_EnumReservedRange.fromJSON(e)) + : [], + reservedName: Array.isArray(object?.reservedName) + ? 
object.reservedName.map((e) => String(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + message.name !== undefined && (obj.name = message.name); + if (message.value) { + obj.value = message.value.map((e) => e ? exports.EnumValueDescriptorProto.toJSON(e) : undefined); + } + else { + obj.value = []; + } + message.options !== undefined && (obj.options = message.options ? exports.EnumOptions.toJSON(message.options) : undefined); + if (message.reservedRange) { + obj.reservedRange = message.reservedRange.map((e) => e ? exports.EnumDescriptorProto_EnumReservedRange.toJSON(e) : undefined); + } + else { + obj.reservedRange = []; + } + if (message.reservedName) { + obj.reservedName = message.reservedName.map((e) => e); + } + else { + obj.reservedName = []; + } + return obj; + }, +}; +function createBaseEnumDescriptorProto_EnumReservedRange() { + return { start: 0, end: 0 }; +} +exports.EnumDescriptorProto_EnumReservedRange = { + fromJSON(object) { + return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? Number(object.end) : 0 }; + }, + toJSON(message) { + const obj = {}; + message.start !== undefined && (obj.start = Math.round(message.start)); + message.end !== undefined && (obj.end = Math.round(message.end)); + return obj; + }, +}; +function createBaseEnumValueDescriptorProto() { + return { name: "", number: 0, options: undefined }; +} +exports.EnumValueDescriptorProto = { + fromJSON(object) { + return { + name: isSet(object.name) ? String(object.name) : "", + number: isSet(object.number) ? Number(object.number) : 0, + options: isSet(object.options) ? exports.EnumValueOptions.fromJSON(object.options) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.name !== undefined && (obj.name = message.name); + message.number !== undefined && (obj.number = Math.round(message.number)); + message.options !== undefined && + (obj.options = message.options ? exports.EnumValueOptions.toJSON(message.options) : undefined); + return obj; + }, +}; +function createBaseServiceDescriptorProto() { + return { name: "", method: [], options: undefined }; +} +exports.ServiceDescriptorProto = { + fromJSON(object) { + return { + name: isSet(object.name) ? String(object.name) : "", + method: Array.isArray(object?.method) ? object.method.map((e) => exports.MethodDescriptorProto.fromJSON(e)) : [], + options: isSet(object.options) ? exports.ServiceOptions.fromJSON(object.options) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.name !== undefined && (obj.name = message.name); + if (message.method) { + obj.method = message.method.map((e) => e ? exports.MethodDescriptorProto.toJSON(e) : undefined); + } + else { + obj.method = []; + } + message.options !== undefined && + (obj.options = message.options ? exports.ServiceOptions.toJSON(message.options) : undefined); + return obj; + }, +}; +function createBaseMethodDescriptorProto() { + return { + name: "", + inputType: "", + outputType: "", + options: undefined, + clientStreaming: false, + serverStreaming: false, + }; +} +exports.MethodDescriptorProto = { + fromJSON(object) { + return { + name: isSet(object.name) ? String(object.name) : "", + inputType: isSet(object.inputType) ? String(object.inputType) : "", + outputType: isSet(object.outputType) ? String(object.outputType) : "", + options: isSet(object.options) ? exports.MethodOptions.fromJSON(object.options) : undefined, + clientStreaming: isSet(object.clientStreaming) ? 
Boolean(object.clientStreaming) : false, + serverStreaming: isSet(object.serverStreaming) ? Boolean(object.serverStreaming) : false, + }; + }, + toJSON(message) { + const obj = {}; + message.name !== undefined && (obj.name = message.name); + message.inputType !== undefined && (obj.inputType = message.inputType); + message.outputType !== undefined && (obj.outputType = message.outputType); + message.options !== undefined && + (obj.options = message.options ? exports.MethodOptions.toJSON(message.options) : undefined); + message.clientStreaming !== undefined && (obj.clientStreaming = message.clientStreaming); + message.serverStreaming !== undefined && (obj.serverStreaming = message.serverStreaming); + return obj; + }, +}; +function createBaseFileOptions() { + return { + javaPackage: "", + javaOuterClassname: "", + javaMultipleFiles: false, + javaGenerateEqualsAndHash: false, + javaStringCheckUtf8: false, + optimizeFor: 1, + goPackage: "", + ccGenericServices: false, + javaGenericServices: false, + pyGenericServices: false, + phpGenericServices: false, + deprecated: false, + ccEnableArenas: false, + objcClassPrefix: "", + csharpNamespace: "", + swiftPrefix: "", + phpClassPrefix: "", + phpNamespace: "", + phpMetadataNamespace: "", + rubyPackage: "", + uninterpretedOption: [], + }; +} +exports.FileOptions = { + fromJSON(object) { + return { + javaPackage: isSet(object.javaPackage) ? String(object.javaPackage) : "", + javaOuterClassname: isSet(object.javaOuterClassname) ? String(object.javaOuterClassname) : "", + javaMultipleFiles: isSet(object.javaMultipleFiles) ? Boolean(object.javaMultipleFiles) : false, + javaGenerateEqualsAndHash: isSet(object.javaGenerateEqualsAndHash) + ? Boolean(object.javaGenerateEqualsAndHash) + : false, + javaStringCheckUtf8: isSet(object.javaStringCheckUtf8) ? Boolean(object.javaStringCheckUtf8) : false, + optimizeFor: isSet(object.optimizeFor) ? fileOptions_OptimizeModeFromJSON(object.optimizeFor) : 1, + goPackage: isSet(object.goPackage) ? String(object.goPackage) : "", + ccGenericServices: isSet(object.ccGenericServices) ? Boolean(object.ccGenericServices) : false, + javaGenericServices: isSet(object.javaGenericServices) ? Boolean(object.javaGenericServices) : false, + pyGenericServices: isSet(object.pyGenericServices) ? Boolean(object.pyGenericServices) : false, + phpGenericServices: isSet(object.phpGenericServices) ? Boolean(object.phpGenericServices) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + ccEnableArenas: isSet(object.ccEnableArenas) ? Boolean(object.ccEnableArenas) : false, + objcClassPrefix: isSet(object.objcClassPrefix) ? String(object.objcClassPrefix) : "", + csharpNamespace: isSet(object.csharpNamespace) ? String(object.csharpNamespace) : "", + swiftPrefix: isSet(object.swiftPrefix) ? String(object.swiftPrefix) : "", + phpClassPrefix: isSet(object.phpClassPrefix) ? String(object.phpClassPrefix) : "", + phpNamespace: isSet(object.phpNamespace) ? String(object.phpNamespace) : "", + phpMetadataNamespace: isSet(object.phpMetadataNamespace) ? String(object.phpMetadataNamespace) : "", + rubyPackage: isSet(object.rubyPackage) ? String(object.rubyPackage) : "", + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + message.javaPackage !== undefined && (obj.javaPackage = message.javaPackage); + message.javaOuterClassname !== undefined && (obj.javaOuterClassname = message.javaOuterClassname); + message.javaMultipleFiles !== undefined && (obj.javaMultipleFiles = message.javaMultipleFiles); + message.javaGenerateEqualsAndHash !== undefined && + (obj.javaGenerateEqualsAndHash = message.javaGenerateEqualsAndHash); + message.javaStringCheckUtf8 !== undefined && (obj.javaStringCheckUtf8 = message.javaStringCheckUtf8); + message.optimizeFor !== undefined && (obj.optimizeFor = fileOptions_OptimizeModeToJSON(message.optimizeFor)); + message.goPackage !== undefined && (obj.goPackage = message.goPackage); + message.ccGenericServices !== undefined && (obj.ccGenericServices = message.ccGenericServices); + message.javaGenericServices !== undefined && (obj.javaGenericServices = message.javaGenericServices); + message.pyGenericServices !== undefined && (obj.pyGenericServices = message.pyGenericServices); + message.phpGenericServices !== undefined && (obj.phpGenericServices = message.phpGenericServices); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.ccEnableArenas !== undefined && (obj.ccEnableArenas = message.ccEnableArenas); + message.objcClassPrefix !== undefined && (obj.objcClassPrefix = message.objcClassPrefix); + message.csharpNamespace !== undefined && (obj.csharpNamespace = message.csharpNamespace); + message.swiftPrefix !== undefined && (obj.swiftPrefix = message.swiftPrefix); + message.phpClassPrefix !== undefined && (obj.phpClassPrefix = message.phpClassPrefix); + message.phpNamespace !== undefined && (obj.phpNamespace = message.phpNamespace); + message.phpMetadataNamespace !== undefined && (obj.phpMetadataNamespace = message.phpMetadataNamespace); + message.rubyPackage !== undefined && (obj.rubyPackage = message.rubyPackage); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined); + } + else { + obj.uninterpretedOption = []; + } + return obj; + }, +}; +function createBaseMessageOptions() { + return { + messageSetWireFormat: false, + noStandardDescriptorAccessor: false, + deprecated: false, + mapEntry: false, + uninterpretedOption: [], + }; +} +exports.MessageOptions = { + fromJSON(object) { + return { + messageSetWireFormat: isSet(object.messageSetWireFormat) ? Boolean(object.messageSetWireFormat) : false, + noStandardDescriptorAccessor: isSet(object.noStandardDescriptorAccessor) + ? Boolean(object.noStandardDescriptorAccessor) + : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + mapEntry: isSet(object.mapEntry) ? Boolean(object.mapEntry) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + message.messageSetWireFormat !== undefined && (obj.messageSetWireFormat = message.messageSetWireFormat); + message.noStandardDescriptorAccessor !== undefined && + (obj.noStandardDescriptorAccessor = message.noStandardDescriptorAccessor); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.mapEntry !== undefined && (obj.mapEntry = message.mapEntry); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined); + } + else { + obj.uninterpretedOption = []; + } + return obj; + }, +}; +function createBaseFieldOptions() { + return { + ctype: 0, + packed: false, + jstype: 0, + lazy: false, + unverifiedLazy: false, + deprecated: false, + weak: false, + uninterpretedOption: [], + }; +} +exports.FieldOptions = { + fromJSON(object) { + return { + ctype: isSet(object.ctype) ? fieldOptions_CTypeFromJSON(object.ctype) : 0, + packed: isSet(object.packed) ? Boolean(object.packed) : false, + jstype: isSet(object.jstype) ? fieldOptions_JSTypeFromJSON(object.jstype) : 0, + lazy: isSet(object.lazy) ? Boolean(object.lazy) : false, + unverifiedLazy: isSet(object.unverifiedLazy) ? Boolean(object.unverifiedLazy) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + weak: isSet(object.weak) ? Boolean(object.weak) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + message.ctype !== undefined && (obj.ctype = fieldOptions_CTypeToJSON(message.ctype)); + message.packed !== undefined && (obj.packed = message.packed); + message.jstype !== undefined && (obj.jstype = fieldOptions_JSTypeToJSON(message.jstype)); + message.lazy !== undefined && (obj.lazy = message.lazy); + message.unverifiedLazy !== undefined && (obj.unverifiedLazy = message.unverifiedLazy); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.weak !== undefined && (obj.weak = message.weak); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined); + } + else { + obj.uninterpretedOption = []; + } + return obj; + }, +}; +function createBaseOneofOptions() { + return { uninterpretedOption: [] }; +} +exports.OneofOptions = { + fromJSON(object) { + return { + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined); + } + else { + obj.uninterpretedOption = []; + } + return obj; + }, +}; +function createBaseEnumOptions() { + return { allowAlias: false, deprecated: false, uninterpretedOption: [] }; +} +exports.EnumOptions = { + fromJSON(object) { + return { + allowAlias: isSet(object.allowAlias) ? Boolean(object.allowAlias) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + message.allowAlias !== undefined && (obj.allowAlias = message.allowAlias); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined); + } + else { + obj.uninterpretedOption = []; + } + return obj; + }, +}; +function createBaseEnumValueOptions() { + return { deprecated: false, uninterpretedOption: [] }; +} +exports.EnumValueOptions = { + fromJSON(object) { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined); + } + else { + obj.uninterpretedOption = []; + } + return obj; + }, +}; +function createBaseServiceOptions() { + return { deprecated: false, uninterpretedOption: [] }; +} +exports.ServiceOptions = { + fromJSON(object) { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined); + } + else { + obj.uninterpretedOption = []; + } + return obj; + }, +}; +function createBaseMethodOptions() { + return { deprecated: false, idempotencyLevel: 0, uninterpretedOption: [] }; +} +exports.MethodOptions = { + fromJSON(object) { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + idempotencyLevel: isSet(object.idempotencyLevel) + ? methodOptions_IdempotencyLevelFromJSON(object.idempotencyLevel) + : 0, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.idempotencyLevel !== undefined && + (obj.idempotencyLevel = methodOptions_IdempotencyLevelToJSON(message.idempotencyLevel)); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined); + } + else { + obj.uninterpretedOption = []; + } + return obj; + }, +}; +function createBaseUninterpretedOption() { + return { + name: [], + identifierValue: "", + positiveIntValue: "0", + negativeIntValue: "0", + doubleValue: 0, + stringValue: Buffer.alloc(0), + aggregateValue: "", + }; +} +exports.UninterpretedOption = { + fromJSON(object) { + return { + name: Array.isArray(object?.name) ? object.name.map((e) => exports.UninterpretedOption_NamePart.fromJSON(e)) : [], + identifierValue: isSet(object.identifierValue) ? 
String(object.identifierValue) : "", + positiveIntValue: isSet(object.positiveIntValue) ? String(object.positiveIntValue) : "0", + negativeIntValue: isSet(object.negativeIntValue) ? String(object.negativeIntValue) : "0", + doubleValue: isSet(object.doubleValue) ? Number(object.doubleValue) : 0, + stringValue: isSet(object.stringValue) ? Buffer.from(bytesFromBase64(object.stringValue)) : Buffer.alloc(0), + aggregateValue: isSet(object.aggregateValue) ? String(object.aggregateValue) : "", + }; + }, + toJSON(message) { + const obj = {}; + if (message.name) { + obj.name = message.name.map((e) => e ? exports.UninterpretedOption_NamePart.toJSON(e) : undefined); + } + else { + obj.name = []; + } + message.identifierValue !== undefined && (obj.identifierValue = message.identifierValue); + message.positiveIntValue !== undefined && (obj.positiveIntValue = message.positiveIntValue); + message.negativeIntValue !== undefined && (obj.negativeIntValue = message.negativeIntValue); + message.doubleValue !== undefined && (obj.doubleValue = message.doubleValue); + message.stringValue !== undefined && + (obj.stringValue = base64FromBytes(message.stringValue !== undefined ? message.stringValue : Buffer.alloc(0))); + message.aggregateValue !== undefined && (obj.aggregateValue = message.aggregateValue); + return obj; + }, +}; +function createBaseUninterpretedOption_NamePart() { + return { namePart: "", isExtension: false }; +} +exports.UninterpretedOption_NamePart = { + fromJSON(object) { + return { + namePart: isSet(object.namePart) ? String(object.namePart) : "", + isExtension: isSet(object.isExtension) ? Boolean(object.isExtension) : false, + }; + }, + toJSON(message) { + const obj = {}; + message.namePart !== undefined && (obj.namePart = message.namePart); + message.isExtension !== undefined && (obj.isExtension = message.isExtension); + return obj; + }, +}; +function createBaseSourceCodeInfo() { + return { location: [] }; +} +exports.SourceCodeInfo = { + fromJSON(object) { + return { + location: Array.isArray(object?.location) + ? object.location.map((e) => exports.SourceCodeInfo_Location.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.location) { + obj.location = message.location.map((e) => e ? exports.SourceCodeInfo_Location.toJSON(e) : undefined); + } + else { + obj.location = []; + } + return obj; + }, +}; +function createBaseSourceCodeInfo_Location() { + return { path: [], span: [], leadingComments: "", trailingComments: "", leadingDetachedComments: [] }; +} +exports.SourceCodeInfo_Location = { + fromJSON(object) { + return { + path: Array.isArray(object?.path) ? object.path.map((e) => Number(e)) : [], + span: Array.isArray(object?.span) ? object.span.map((e) => Number(e)) : [], + leadingComments: isSet(object.leadingComments) ? String(object.leadingComments) : "", + trailingComments: isSet(object.trailingComments) ? String(object.trailingComments) : "", + leadingDetachedComments: Array.isArray(object?.leadingDetachedComments) + ? 
object.leadingDetachedComments.map((e) => String(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.path) { + obj.path = message.path.map((e) => Math.round(e)); + } + else { + obj.path = []; + } + if (message.span) { + obj.span = message.span.map((e) => Math.round(e)); + } + else { + obj.span = []; + } + message.leadingComments !== undefined && (obj.leadingComments = message.leadingComments); + message.trailingComments !== undefined && (obj.trailingComments = message.trailingComments); + if (message.leadingDetachedComments) { + obj.leadingDetachedComments = message.leadingDetachedComments.map((e) => e); + } + else { + obj.leadingDetachedComments = []; + } + return obj; + }, +}; +function createBaseGeneratedCodeInfo() { + return { annotation: [] }; +} +exports.GeneratedCodeInfo = { + fromJSON(object) { + return { + annotation: Array.isArray(object?.annotation) + ? object.annotation.map((e) => exports.GeneratedCodeInfo_Annotation.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.annotation) { + obj.annotation = message.annotation.map((e) => e ? exports.GeneratedCodeInfo_Annotation.toJSON(e) : undefined); + } + else { + obj.annotation = []; + } + return obj; + }, +}; +function createBaseGeneratedCodeInfo_Annotation() { + return { path: [], sourceFile: "", begin: 0, end: 0 }; +} +exports.GeneratedCodeInfo_Annotation = { + fromJSON(object) { + return { + path: Array.isArray(object?.path) ? object.path.map((e) => Number(e)) : [], + sourceFile: isSet(object.sourceFile) ? String(object.sourceFile) : "", + begin: isSet(object.begin) ? Number(object.begin) : 0, + end: isSet(object.end) ? Number(object.end) : 0, + }; + }, + toJSON(message) { + const obj = {}; + if (message.path) { + obj.path = message.path.map((e) => Math.round(e)); + } + else { + obj.path = []; + } + message.sourceFile !== undefined && (obj.sourceFile = message.sourceFile); + message.begin !== undefined && (obj.begin = Math.round(message.begin)); + message.end !== undefined && (obj.end = Math.round(message.end)); + return obj; + }, +}; +var tsProtoGlobalThis = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); +function bytesFromBase64(b64) { + if (tsProtoGlobalThis.Buffer) { + return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64")); + } + else { + const bin = tsProtoGlobalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} +function base64FromBytes(arr) { + if (tsProtoGlobalThis.Buffer) { + return tsProtoGlobalThis.Buffer.from(arr).toString("base64"); + } + else { + const bin = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return tsProtoGlobalThis.btoa(bin.join("")); + } +} +function isSet(value) { + return value !== null && value !== undefined; +} diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js new file mode 100644 index 0000000000000..159135fe87172 --- /dev/null +++ b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js @@ -0,0 +1,24 
@@ +"use strict"; +/* eslint-disable */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Timestamp = void 0; +function createBaseTimestamp() { + return { seconds: "0", nanos: 0 }; +} +exports.Timestamp = { + fromJSON(object) { + return { + seconds: isSet(object.seconds) ? String(object.seconds) : "0", + nanos: isSet(object.nanos) ? Number(object.nanos) : 0, + }; + }, + toJSON(message) { + const obj = {}; + message.seconds !== undefined && (obj.seconds = message.seconds); + message.nanos !== undefined && (obj.nanos = Math.round(message.nanos)); + return obj; + }, +}; +function isSet(value) { + return value !== null && value !== undefined; +} diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js new file mode 100644 index 0000000000000..1ef3e1b3356b7 --- /dev/null +++ b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js @@ -0,0 +1,106 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Bundle = exports.VerificationMaterial = exports.TimestampVerificationData = void 0; +/* eslint-disable */ +const envelope_1 = require("./envelope"); +const sigstore_common_1 = require("./sigstore_common"); +const sigstore_rekor_1 = require("./sigstore_rekor"); +function createBaseTimestampVerificationData() { + return { rfc3161Timestamps: [] }; +} +exports.TimestampVerificationData = { + fromJSON(object) { + return { + rfc3161Timestamps: Array.isArray(object?.rfc3161Timestamps) + ? object.rfc3161Timestamps.map((e) => sigstore_common_1.RFC3161SignedTimestamp.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.rfc3161Timestamps) { + obj.rfc3161Timestamps = message.rfc3161Timestamps.map((e) => e ? sigstore_common_1.RFC3161SignedTimestamp.toJSON(e) : undefined); + } + else { + obj.rfc3161Timestamps = []; + } + return obj; + }, +}; +function createBaseVerificationMaterial() { + return { content: undefined, tlogEntries: [], timestampVerificationData: undefined }; +} +exports.VerificationMaterial = { + fromJSON(object) { + return { + content: isSet(object.publicKey) + ? { $case: "publicKey", publicKey: sigstore_common_1.PublicKeyIdentifier.fromJSON(object.publicKey) } + : isSet(object.x509CertificateChain) + ? { + $case: "x509CertificateChain", + x509CertificateChain: sigstore_common_1.X509CertificateChain.fromJSON(object.x509CertificateChain), + } + : undefined, + tlogEntries: Array.isArray(object?.tlogEntries) + ? object.tlogEntries.map((e) => sigstore_rekor_1.TransparencyLogEntry.fromJSON(e)) + : [], + timestampVerificationData: isSet(object.timestampVerificationData) + ? exports.TimestampVerificationData.fromJSON(object.timestampVerificationData) + : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.content?.$case === "publicKey" && + (obj.publicKey = message.content?.publicKey ? sigstore_common_1.PublicKeyIdentifier.toJSON(message.content?.publicKey) : undefined); + message.content?.$case === "x509CertificateChain" && + (obj.x509CertificateChain = message.content?.x509CertificateChain + ? sigstore_common_1.X509CertificateChain.toJSON(message.content?.x509CertificateChain) + : undefined); + if (message.tlogEntries) { + obj.tlogEntries = message.tlogEntries.map((e) => e ? 
sigstore_rekor_1.TransparencyLogEntry.toJSON(e) : undefined);
+        }
+        else {
+            obj.tlogEntries = [];
+        }
+        message.timestampVerificationData !== undefined &&
+            (obj.timestampVerificationData = message.timestampVerificationData
+                ? exports.TimestampVerificationData.toJSON(message.timestampVerificationData)
+                : undefined);
+        return obj;
+    },
+};
+function createBaseBundle() {
+    return { mediaType: "", verificationMaterial: undefined, content: undefined };
+}
+exports.Bundle = {
+    fromJSON(object) {
+        return {
+            mediaType: isSet(object.mediaType) ? String(object.mediaType) : "",
+            verificationMaterial: isSet(object.verificationMaterial)
+                ? exports.VerificationMaterial.fromJSON(object.verificationMaterial)
+                : undefined,
+            content: isSet(object.messageSignature)
+                ? { $case: "messageSignature", messageSignature: sigstore_common_1.MessageSignature.fromJSON(object.messageSignature) }
+                : isSet(object.dsseEnvelope)
+                    ? { $case: "dsseEnvelope", dsseEnvelope: envelope_1.Envelope.fromJSON(object.dsseEnvelope) }
+                    : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.mediaType !== undefined && (obj.mediaType = message.mediaType);
+        message.verificationMaterial !== undefined && (obj.verificationMaterial = message.verificationMaterial
+            ? exports.VerificationMaterial.toJSON(message.verificationMaterial)
+            : undefined);
+        message.content?.$case === "messageSignature" && (obj.messageSignature = message.content?.messageSignature
+            ? sigstore_common_1.MessageSignature.toJSON(message.content?.messageSignature)
+            : undefined);
+        message.content?.$case === "dsseEnvelope" &&
+            (obj.dsseEnvelope = message.content?.dsseEnvelope ? envelope_1.Envelope.toJSON(message.content?.dsseEnvelope) : undefined);
+        return obj;
+    },
+};
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
new file mode 100644
index 0000000000000..bcd654e9154b9
--- /dev/null
+++ b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
@@ -0,0 +1,457 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TimeRange = exports.X509CertificateChain = exports.SubjectAlternativeName = exports.X509Certificate = exports.DistinguishedName = exports.ObjectIdentifierValuePair = exports.ObjectIdentifier = exports.PublicKeyIdentifier = exports.PublicKey = exports.RFC3161SignedTimestamp = exports.LogId = exports.MessageSignature = exports.HashOutput = exports.subjectAlternativeNameTypeToJSON = exports.subjectAlternativeNameTypeFromJSON = exports.SubjectAlternativeNameType = exports.publicKeyDetailsToJSON = exports.publicKeyDetailsFromJSON = exports.PublicKeyDetails = exports.hashAlgorithmToJSON = exports.hashAlgorithmFromJSON = exports.HashAlgorithm = void 0;
+/* eslint-disable */
+const timestamp_1 = require("./google/protobuf/timestamp");
+/**
+ * Only a subset of the secure hash standard algorithms are supported.
+ * See <https://csrc.nist.gov/projects/hash-functions> for more
+ * details.
+ * UNSPECIFIED SHOULD not be used, primary reason for inclusion is to force
+ * any proto JSON serialization to emit the used hash algorithm, as default
+ * option is to *omit* the default value of an enum (which is the first
+ * value, represented by '0').
+ */
+var HashAlgorithm;
+(function (HashAlgorithm) {
+    HashAlgorithm[HashAlgorithm["HASH_ALGORITHM_UNSPECIFIED"] = 0] = "HASH_ALGORITHM_UNSPECIFIED";
+    HashAlgorithm[HashAlgorithm["SHA2_256"] = 1] = "SHA2_256";
+})(HashAlgorithm = exports.HashAlgorithm || (exports.HashAlgorithm = {}));
+function hashAlgorithmFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "HASH_ALGORITHM_UNSPECIFIED":
+            return HashAlgorithm.HASH_ALGORITHM_UNSPECIFIED;
+        case 1:
+        case "SHA2_256":
+            return HashAlgorithm.SHA2_256;
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm");
+    }
+}
+exports.hashAlgorithmFromJSON = hashAlgorithmFromJSON;
+function hashAlgorithmToJSON(object) {
+    switch (object) {
+        case HashAlgorithm.HASH_ALGORITHM_UNSPECIFIED:
+            return "HASH_ALGORITHM_UNSPECIFIED";
+        case HashAlgorithm.SHA2_256:
+            return "SHA2_256";
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm");
+    }
+}
+exports.hashAlgorithmToJSON = hashAlgorithmToJSON;
+/**
+ * Details of a specific public key, capturing the key encoding method,
+ * and signature algorithm.
+ * To avoid the possibility of contradicting formats such as PKCS1 with
+ * ED25519 the valid permutations are listed as a linear set instead of a
+ * cartesian set (i.e. one combined variable instead of two, one for encoding
+ * and one for the signature algorithm).
+ */
+var PublicKeyDetails;
+(function (PublicKeyDetails) {
+    PublicKeyDetails[PublicKeyDetails["PUBLIC_KEY_DETAILS_UNSPECIFIED"] = 0] = "PUBLIC_KEY_DETAILS_UNSPECIFIED";
+    /** PKCS1_RSA_PKCS1V5 - RSA */
+    PublicKeyDetails[PublicKeyDetails["PKCS1_RSA_PKCS1V5"] = 1] = "PKCS1_RSA_PKCS1V5";
+    /** PKCS1_RSA_PSS - See RFC8017 */
+    PublicKeyDetails[PublicKeyDetails["PKCS1_RSA_PSS"] = 2] = "PKCS1_RSA_PSS";
+    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PKCS1V5"] = 3] = "PKIX_RSA_PKCS1V5";
+    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PSS"] = 4] = "PKIX_RSA_PSS";
+    /** PKIX_ECDSA_P256_SHA_256 - ECDSA */
+    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P256_SHA_256"] = 5] = "PKIX_ECDSA_P256_SHA_256";
+    /** PKIX_ECDSA_P256_HMAC_SHA_256 - See RFC6979 */
+    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P256_HMAC_SHA_256"] = 6] = "PKIX_ECDSA_P256_HMAC_SHA_256";
+    /** PKIX_ED25519 - Ed 25519 */
+    PublicKeyDetails[PublicKeyDetails["PKIX_ED25519"] = 7] = "PKIX_ED25519";
+})(PublicKeyDetails = exports.PublicKeyDetails || (exports.PublicKeyDetails = {}));
+function publicKeyDetailsFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "PUBLIC_KEY_DETAILS_UNSPECIFIED":
+            return PublicKeyDetails.PUBLIC_KEY_DETAILS_UNSPECIFIED;
+        case 1:
+        case "PKCS1_RSA_PKCS1V5":
+            return PublicKeyDetails.PKCS1_RSA_PKCS1V5;
+        case 2:
+        case "PKCS1_RSA_PSS":
+            return PublicKeyDetails.PKCS1_RSA_PSS;
+        case 3:
+        case "PKIX_RSA_PKCS1V5":
+            return PublicKeyDetails.PKIX_RSA_PKCS1V5;
+        case 4:
+        case "PKIX_RSA_PSS":
+            return PublicKeyDetails.PKIX_RSA_PSS;
+        case 5:
+        case "PKIX_ECDSA_P256_SHA_256":
+            return PublicKeyDetails.PKIX_ECDSA_P256_SHA_256;
+        case 6:
+        case "PKIX_ECDSA_P256_HMAC_SHA_256":
+            return PublicKeyDetails.PKIX_ECDSA_P256_HMAC_SHA_256;
+        case 7:
+        case "PKIX_ED25519":
+            return PublicKeyDetails.PKIX_ED25519;
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails");
+    }
+}
+exports.publicKeyDetailsFromJSON = publicKeyDetailsFromJSON;
+function publicKeyDetailsToJSON(object) {
+    switch (object) {
+        case PublicKeyDetails.PUBLIC_KEY_DETAILS_UNSPECIFIED:
+            
return "PUBLIC_KEY_DETAILS_UNSPECIFIED"; + case PublicKeyDetails.PKCS1_RSA_PKCS1V5: + return "PKCS1_RSA_PKCS1V5"; + case PublicKeyDetails.PKCS1_RSA_PSS: + return "PKCS1_RSA_PSS"; + case PublicKeyDetails.PKIX_RSA_PKCS1V5: + return "PKIX_RSA_PKCS1V5"; + case PublicKeyDetails.PKIX_RSA_PSS: + return "PKIX_RSA_PSS"; + case PublicKeyDetails.PKIX_ECDSA_P256_SHA_256: + return "PKIX_ECDSA_P256_SHA_256"; + case PublicKeyDetails.PKIX_ECDSA_P256_HMAC_SHA_256: + return "PKIX_ECDSA_P256_HMAC_SHA_256"; + case PublicKeyDetails.PKIX_ED25519: + return "PKIX_ED25519"; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails"); + } +} +exports.publicKeyDetailsToJSON = publicKeyDetailsToJSON; +var SubjectAlternativeNameType; +(function (SubjectAlternativeNameType) { + SubjectAlternativeNameType[SubjectAlternativeNameType["SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED"] = 0] = "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED"; + SubjectAlternativeNameType[SubjectAlternativeNameType["EMAIL"] = 1] = "EMAIL"; + SubjectAlternativeNameType[SubjectAlternativeNameType["URI"] = 2] = "URI"; + /** + * OTHER_NAME - OID 1.3.6.1.4.1.57264.1.7 + * See https://github.com/sigstore/fulcio/blob/main/docs/oid-info.md#1361415726417--othername-san + * for more details. + */ + SubjectAlternativeNameType[SubjectAlternativeNameType["OTHER_NAME"] = 3] = "OTHER_NAME"; +})(SubjectAlternativeNameType = exports.SubjectAlternativeNameType || (exports.SubjectAlternativeNameType = {})); +function subjectAlternativeNameTypeFromJSON(object) { + switch (object) { + case 0: + case "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED": + return SubjectAlternativeNameType.SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED; + case 1: + case "EMAIL": + return SubjectAlternativeNameType.EMAIL; + case 2: + case "URI": + return SubjectAlternativeNameType.URI; + case 3: + case "OTHER_NAME": + return SubjectAlternativeNameType.OTHER_NAME; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType"); + } +} +exports.subjectAlternativeNameTypeFromJSON = subjectAlternativeNameTypeFromJSON; +function subjectAlternativeNameTypeToJSON(object) { + switch (object) { + case SubjectAlternativeNameType.SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED: + return "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED"; + case SubjectAlternativeNameType.EMAIL: + return "EMAIL"; + case SubjectAlternativeNameType.URI: + return "URI"; + case SubjectAlternativeNameType.OTHER_NAME: + return "OTHER_NAME"; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType"); + } +} +exports.subjectAlternativeNameTypeToJSON = subjectAlternativeNameTypeToJSON; +function createBaseHashOutput() { + return { algorithm: 0, digest: Buffer.alloc(0) }; +} +exports.HashOutput = { + fromJSON(object) { + return { + algorithm: isSet(object.algorithm) ? hashAlgorithmFromJSON(object.algorithm) : 0, + digest: isSet(object.digest) ? Buffer.from(bytesFromBase64(object.digest)) : Buffer.alloc(0), + }; + }, + toJSON(message) { + const obj = {}; + message.algorithm !== undefined && (obj.algorithm = hashAlgorithmToJSON(message.algorithm)); + message.digest !== undefined && + (obj.digest = base64FromBytes(message.digest !== undefined ? 
message.digest : Buffer.alloc(0))); + return obj; + }, +}; +function createBaseMessageSignature() { + return { messageDigest: undefined, signature: Buffer.alloc(0) }; +} +exports.MessageSignature = { + fromJSON(object) { + return { + messageDigest: isSet(object.messageDigest) ? exports.HashOutput.fromJSON(object.messageDigest) : undefined, + signature: isSet(object.signature) ? Buffer.from(bytesFromBase64(object.signature)) : Buffer.alloc(0), + }; + }, + toJSON(message) { + const obj = {}; + message.messageDigest !== undefined && + (obj.messageDigest = message.messageDigest ? exports.HashOutput.toJSON(message.messageDigest) : undefined); + message.signature !== undefined && + (obj.signature = base64FromBytes(message.signature !== undefined ? message.signature : Buffer.alloc(0))); + return obj; + }, +}; +function createBaseLogId() { + return { keyId: Buffer.alloc(0) }; +} +exports.LogId = { + fromJSON(object) { + return { keyId: isSet(object.keyId) ? Buffer.from(bytesFromBase64(object.keyId)) : Buffer.alloc(0) }; + }, + toJSON(message) { + const obj = {}; + message.keyId !== undefined && + (obj.keyId = base64FromBytes(message.keyId !== undefined ? message.keyId : Buffer.alloc(0))); + return obj; + }, +}; +function createBaseRFC3161SignedTimestamp() { + return { signedTimestamp: Buffer.alloc(0) }; +} +exports.RFC3161SignedTimestamp = { + fromJSON(object) { + return { + signedTimestamp: isSet(object.signedTimestamp) + ? Buffer.from(bytesFromBase64(object.signedTimestamp)) + : Buffer.alloc(0), + }; + }, + toJSON(message) { + const obj = {}; + message.signedTimestamp !== undefined && + (obj.signedTimestamp = base64FromBytes(message.signedTimestamp !== undefined ? message.signedTimestamp : Buffer.alloc(0))); + return obj; + }, +}; +function createBasePublicKey() { + return { rawBytes: undefined, keyDetails: 0, validFor: undefined }; +} +exports.PublicKey = { + fromJSON(object) { + return { + rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : undefined, + keyDetails: isSet(object.keyDetails) ? publicKeyDetailsFromJSON(object.keyDetails) : 0, + validFor: isSet(object.validFor) ? exports.TimeRange.fromJSON(object.validFor) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.rawBytes !== undefined && + (obj.rawBytes = message.rawBytes !== undefined ? base64FromBytes(message.rawBytes) : undefined); + message.keyDetails !== undefined && (obj.keyDetails = publicKeyDetailsToJSON(message.keyDetails)); + message.validFor !== undefined && + (obj.validFor = message.validFor ? exports.TimeRange.toJSON(message.validFor) : undefined); + return obj; + }, +}; +function createBasePublicKeyIdentifier() { + return { hint: "" }; +} +exports.PublicKeyIdentifier = { + fromJSON(object) { + return { hint: isSet(object.hint) ? String(object.hint) : "" }; + }, + toJSON(message) { + const obj = {}; + message.hint !== undefined && (obj.hint = message.hint); + return obj; + }, +}; +function createBaseObjectIdentifier() { + return { id: [] }; +} +exports.ObjectIdentifier = { + fromJSON(object) { + return { id: Array.isArray(object?.id) ? object.id.map((e) => Number(e)) : [] }; + }, + toJSON(message) { + const obj = {}; + if (message.id) { + obj.id = message.id.map((e) => Math.round(e)); + } + else { + obj.id = []; + } + return obj; + }, +}; +function createBaseObjectIdentifierValuePair() { + return { oid: undefined, value: Buffer.alloc(0) }; +} +exports.ObjectIdentifierValuePair = { + fromJSON(object) { + return { + oid: isSet(object.oid) ? 
exports.ObjectIdentifier.fromJSON(object.oid) : undefined, + value: isSet(object.value) ? Buffer.from(bytesFromBase64(object.value)) : Buffer.alloc(0), + }; + }, + toJSON(message) { + const obj = {}; + message.oid !== undefined && (obj.oid = message.oid ? exports.ObjectIdentifier.toJSON(message.oid) : undefined); + message.value !== undefined && + (obj.value = base64FromBytes(message.value !== undefined ? message.value : Buffer.alloc(0))); + return obj; + }, +}; +function createBaseDistinguishedName() { + return { organization: "", commonName: "" }; +} +exports.DistinguishedName = { + fromJSON(object) { + return { + organization: isSet(object.organization) ? String(object.organization) : "", + commonName: isSet(object.commonName) ? String(object.commonName) : "", + }; + }, + toJSON(message) { + const obj = {}; + message.organization !== undefined && (obj.organization = message.organization); + message.commonName !== undefined && (obj.commonName = message.commonName); + return obj; + }, +}; +function createBaseX509Certificate() { + return { rawBytes: Buffer.alloc(0) }; +} +exports.X509Certificate = { + fromJSON(object) { + return { rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : Buffer.alloc(0) }; + }, + toJSON(message) { + const obj = {}; + message.rawBytes !== undefined && + (obj.rawBytes = base64FromBytes(message.rawBytes !== undefined ? message.rawBytes : Buffer.alloc(0))); + return obj; + }, +}; +function createBaseSubjectAlternativeName() { + return { type: 0, identity: undefined }; +} +exports.SubjectAlternativeName = { + fromJSON(object) { + return { + type: isSet(object.type) ? subjectAlternativeNameTypeFromJSON(object.type) : 0, + identity: isSet(object.regexp) + ? { $case: "regexp", regexp: String(object.regexp) } + : isSet(object.value) + ? { $case: "value", value: String(object.value) } + : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.type !== undefined && (obj.type = subjectAlternativeNameTypeToJSON(message.type)); + message.identity?.$case === "regexp" && (obj.regexp = message.identity?.regexp); + message.identity?.$case === "value" && (obj.value = message.identity?.value); + return obj; + }, +}; +function createBaseX509CertificateChain() { + return { certificates: [] }; +} +exports.X509CertificateChain = { + fromJSON(object) { + return { + certificates: Array.isArray(object?.certificates) + ? object.certificates.map((e) => exports.X509Certificate.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.certificates) { + obj.certificates = message.certificates.map((e) => e ? exports.X509Certificate.toJSON(e) : undefined); + } + else { + obj.certificates = []; + } + return obj; + }, +}; +function createBaseTimeRange() { + return { start: undefined, end: undefined }; +} +exports.TimeRange = { + fromJSON(object) { + return { + start: isSet(object.start) ? fromJsonTimestamp(object.start) : undefined, + end: isSet(object.end) ? 
fromJsonTimestamp(object.end) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.start !== undefined && (obj.start = message.start.toISOString()); + message.end !== undefined && (obj.end = message.end.toISOString()); + return obj; + }, +}; +var tsProtoGlobalThis = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); +function bytesFromBase64(b64) { + if (tsProtoGlobalThis.Buffer) { + return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64")); + } + else { + const bin = tsProtoGlobalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} +function base64FromBytes(arr) { + if (tsProtoGlobalThis.Buffer) { + return tsProtoGlobalThis.Buffer.from(arr).toString("base64"); + } + else { + const bin = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return tsProtoGlobalThis.btoa(bin.join("")); + } +} +function fromTimestamp(t) { + let millis = Number(t.seconds) * 1000; + millis += t.nanos / 1000000; + return new Date(millis); +} +function fromJsonTimestamp(o) { + if (o instanceof Date) { + return o; + } + else if (typeof o === "string") { + return new Date(o); + } + else { + return fromTimestamp(timestamp_1.Timestamp.fromJSON(o)); + } +} +function isSet(value) { + return value !== null && value !== undefined; +} diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js new file mode 100644 index 0000000000000..398193b2075a7 --- /dev/null +++ b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js @@ -0,0 +1,167 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TransparencyLogEntry = exports.InclusionPromise = exports.InclusionProof = exports.Checkpoint = exports.KindVersion = void 0; +/* eslint-disable */ +const sigstore_common_1 = require("./sigstore_common"); +function createBaseKindVersion() { + return { kind: "", version: "" }; +} +exports.KindVersion = { + fromJSON(object) { + return { + kind: isSet(object.kind) ? String(object.kind) : "", + version: isSet(object.version) ? String(object.version) : "", + }; + }, + toJSON(message) { + const obj = {}; + message.kind !== undefined && (obj.kind = message.kind); + message.version !== undefined && (obj.version = message.version); + return obj; + }, +}; +function createBaseCheckpoint() { + return { envelope: "" }; +} +exports.Checkpoint = { + fromJSON(object) { + return { envelope: isSet(object.envelope) ? String(object.envelope) : "" }; + }, + toJSON(message) { + const obj = {}; + message.envelope !== undefined && (obj.envelope = message.envelope); + return obj; + }, +}; +function createBaseInclusionProof() { + return { logIndex: "0", rootHash: Buffer.alloc(0), treeSize: "0", hashes: [], checkpoint: undefined }; +} +exports.InclusionProof = { + fromJSON(object) { + return { + logIndex: isSet(object.logIndex) ? String(object.logIndex) : "0", + rootHash: isSet(object.rootHash) ? Buffer.from(bytesFromBase64(object.rootHash)) : Buffer.alloc(0), + treeSize: isSet(object.treeSize) ? 
String(object.treeSize) : "0", + hashes: Array.isArray(object?.hashes) ? object.hashes.map((e) => Buffer.from(bytesFromBase64(e))) : [], + checkpoint: isSet(object.checkpoint) ? exports.Checkpoint.fromJSON(object.checkpoint) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.logIndex !== undefined && (obj.logIndex = message.logIndex); + message.rootHash !== undefined && + (obj.rootHash = base64FromBytes(message.rootHash !== undefined ? message.rootHash : Buffer.alloc(0))); + message.treeSize !== undefined && (obj.treeSize = message.treeSize); + if (message.hashes) { + obj.hashes = message.hashes.map((e) => base64FromBytes(e !== undefined ? e : Buffer.alloc(0))); + } + else { + obj.hashes = []; + } + message.checkpoint !== undefined && + (obj.checkpoint = message.checkpoint ? exports.Checkpoint.toJSON(message.checkpoint) : undefined); + return obj; + }, +}; +function createBaseInclusionPromise() { + return { signedEntryTimestamp: Buffer.alloc(0) }; +} +exports.InclusionPromise = { + fromJSON(object) { + return { + signedEntryTimestamp: isSet(object.signedEntryTimestamp) + ? Buffer.from(bytesFromBase64(object.signedEntryTimestamp)) + : Buffer.alloc(0), + }; + }, + toJSON(message) { + const obj = {}; + message.signedEntryTimestamp !== undefined && + (obj.signedEntryTimestamp = base64FromBytes(message.signedEntryTimestamp !== undefined ? message.signedEntryTimestamp : Buffer.alloc(0))); + return obj; + }, +}; +function createBaseTransparencyLogEntry() { + return { + logIndex: "0", + logId: undefined, + kindVersion: undefined, + integratedTime: "0", + inclusionPromise: undefined, + inclusionProof: undefined, + canonicalizedBody: Buffer.alloc(0), + }; +} +exports.TransparencyLogEntry = { + fromJSON(object) { + return { + logIndex: isSet(object.logIndex) ? String(object.logIndex) : "0", + logId: isSet(object.logId) ? sigstore_common_1.LogId.fromJSON(object.logId) : undefined, + kindVersion: isSet(object.kindVersion) ? exports.KindVersion.fromJSON(object.kindVersion) : undefined, + integratedTime: isSet(object.integratedTime) ? String(object.integratedTime) : "0", + inclusionPromise: isSet(object.inclusionPromise) ? exports.InclusionPromise.fromJSON(object.inclusionPromise) : undefined, + inclusionProof: isSet(object.inclusionProof) ? exports.InclusionProof.fromJSON(object.inclusionProof) : undefined, + canonicalizedBody: isSet(object.canonicalizedBody) + ? Buffer.from(bytesFromBase64(object.canonicalizedBody)) + : Buffer.alloc(0), + }; + }, + toJSON(message) { + const obj = {}; + message.logIndex !== undefined && (obj.logIndex = message.logIndex); + message.logId !== undefined && (obj.logId = message.logId ? sigstore_common_1.LogId.toJSON(message.logId) : undefined); + message.kindVersion !== undefined && + (obj.kindVersion = message.kindVersion ? exports.KindVersion.toJSON(message.kindVersion) : undefined); + message.integratedTime !== undefined && (obj.integratedTime = message.integratedTime); + message.inclusionPromise !== undefined && + (obj.inclusionPromise = message.inclusionPromise ? exports.InclusionPromise.toJSON(message.inclusionPromise) : undefined); + message.inclusionProof !== undefined && + (obj.inclusionProof = message.inclusionProof ? exports.InclusionProof.toJSON(message.inclusionProof) : undefined); + message.canonicalizedBody !== undefined && + (obj.canonicalizedBody = base64FromBytes(message.canonicalizedBody !== undefined ? 
message.canonicalizedBody : Buffer.alloc(0))); + return obj; + }, +}; +var tsProtoGlobalThis = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); +function bytesFromBase64(b64) { + if (tsProtoGlobalThis.Buffer) { + return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64")); + } + else { + const bin = tsProtoGlobalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} +function base64FromBytes(arr) { + if (tsProtoGlobalThis.Buffer) { + return tsProtoGlobalThis.Buffer.from(arr).toString("base64"); + } + else { + const bin = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return tsProtoGlobalThis.btoa(bin.join("")); + } +} +function isSet(value) { + return value !== null && value !== undefined; +} diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js new file mode 100644 index 0000000000000..05e566767cdb2 --- /dev/null +++ b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js @@ -0,0 +1,103 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TrustedRoot = exports.CertificateAuthority = exports.TransparencyLogInstance = void 0; +/* eslint-disable */ +const sigstore_common_1 = require("./sigstore_common"); +function createBaseTransparencyLogInstance() { + return { baseUrl: "", hashAlgorithm: 0, publicKey: undefined, logId: undefined }; +} +exports.TransparencyLogInstance = { + fromJSON(object) { + return { + baseUrl: isSet(object.baseUrl) ? String(object.baseUrl) : "", + hashAlgorithm: isSet(object.hashAlgorithm) ? (0, sigstore_common_1.hashAlgorithmFromJSON)(object.hashAlgorithm) : 0, + publicKey: isSet(object.publicKey) ? sigstore_common_1.PublicKey.fromJSON(object.publicKey) : undefined, + logId: isSet(object.logId) ? sigstore_common_1.LogId.fromJSON(object.logId) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.baseUrl !== undefined && (obj.baseUrl = message.baseUrl); + message.hashAlgorithm !== undefined && (obj.hashAlgorithm = (0, sigstore_common_1.hashAlgorithmToJSON)(message.hashAlgorithm)); + message.publicKey !== undefined && + (obj.publicKey = message.publicKey ? sigstore_common_1.PublicKey.toJSON(message.publicKey) : undefined); + message.logId !== undefined && (obj.logId = message.logId ? sigstore_common_1.LogId.toJSON(message.logId) : undefined); + return obj; + }, +}; +function createBaseCertificateAuthority() { + return { subject: undefined, uri: "", certChain: undefined, validFor: undefined }; +} +exports.CertificateAuthority = { + fromJSON(object) { + return { + subject: isSet(object.subject) ? sigstore_common_1.DistinguishedName.fromJSON(object.subject) : undefined, + uri: isSet(object.uri) ? String(object.uri) : "", + certChain: isSet(object.certChain) ? sigstore_common_1.X509CertificateChain.fromJSON(object.certChain) : undefined, + validFor: isSet(object.validFor) ? 
sigstore_common_1.TimeRange.fromJSON(object.validFor) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.subject !== undefined && + (obj.subject = message.subject ? sigstore_common_1.DistinguishedName.toJSON(message.subject) : undefined); + message.uri !== undefined && (obj.uri = message.uri); + message.certChain !== undefined && + (obj.certChain = message.certChain ? sigstore_common_1.X509CertificateChain.toJSON(message.certChain) : undefined); + message.validFor !== undefined && + (obj.validFor = message.validFor ? sigstore_common_1.TimeRange.toJSON(message.validFor) : undefined); + return obj; + }, +}; +function createBaseTrustedRoot() { + return { mediaType: "", tlogs: [], certificateAuthorities: [], ctlogs: [], timestampAuthorities: [] }; +} +exports.TrustedRoot = { + fromJSON(object) { + return { + mediaType: isSet(object.mediaType) ? String(object.mediaType) : "", + tlogs: Array.isArray(object?.tlogs) ? object.tlogs.map((e) => exports.TransparencyLogInstance.fromJSON(e)) : [], + certificateAuthorities: Array.isArray(object?.certificateAuthorities) + ? object.certificateAuthorities.map((e) => exports.CertificateAuthority.fromJSON(e)) + : [], + ctlogs: Array.isArray(object?.ctlogs) + ? object.ctlogs.map((e) => exports.TransparencyLogInstance.fromJSON(e)) + : [], + timestampAuthorities: Array.isArray(object?.timestampAuthorities) + ? object.timestampAuthorities.map((e) => exports.CertificateAuthority.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + message.mediaType !== undefined && (obj.mediaType = message.mediaType); + if (message.tlogs) { + obj.tlogs = message.tlogs.map((e) => e ? exports.TransparencyLogInstance.toJSON(e) : undefined); + } + else { + obj.tlogs = []; + } + if (message.certificateAuthorities) { + obj.certificateAuthorities = message.certificateAuthorities.map((e) => e ? exports.CertificateAuthority.toJSON(e) : undefined); + } + else { + obj.certificateAuthorities = []; + } + if (message.ctlogs) { + obj.ctlogs = message.ctlogs.map((e) => e ? exports.TransparencyLogInstance.toJSON(e) : undefined); + } + else { + obj.ctlogs = []; + } + if (message.timestampAuthorities) { + obj.timestampAuthorities = message.timestampAuthorities.map((e) => e ? 
exports.CertificateAuthority.toJSON(e) : undefined); + } + else { + obj.timestampAuthorities = []; + } + return obj; + }, +}; +function isSet(value) { + return value !== null && value !== undefined; +} diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js new file mode 100644 index 0000000000000..8a72b89761869 --- /dev/null +++ b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js @@ -0,0 +1,273 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Input = exports.Artifact = exports.ArtifactVerificationOptions_TimestampAuthorityOptions = exports.ArtifactVerificationOptions_CtlogOptions = exports.ArtifactVerificationOptions_TlogOptions = exports.ArtifactVerificationOptions = exports.PublicKeyIdentities = exports.CertificateIdentities = exports.CertificateIdentity = void 0; +/* eslint-disable */ +const sigstore_bundle_1 = require("./sigstore_bundle"); +const sigstore_common_1 = require("./sigstore_common"); +const sigstore_trustroot_1 = require("./sigstore_trustroot"); +function createBaseCertificateIdentity() { + return { issuer: "", san: undefined, oids: [] }; +} +exports.CertificateIdentity = { + fromJSON(object) { + return { + issuer: isSet(object.issuer) ? String(object.issuer) : "", + san: isSet(object.san) ? sigstore_common_1.SubjectAlternativeName.fromJSON(object.san) : undefined, + oids: Array.isArray(object?.oids) ? object.oids.map((e) => sigstore_common_1.ObjectIdentifierValuePair.fromJSON(e)) : [], + }; + }, + toJSON(message) { + const obj = {}; + message.issuer !== undefined && (obj.issuer = message.issuer); + message.san !== undefined && (obj.san = message.san ? sigstore_common_1.SubjectAlternativeName.toJSON(message.san) : undefined); + if (message.oids) { + obj.oids = message.oids.map((e) => e ? sigstore_common_1.ObjectIdentifierValuePair.toJSON(e) : undefined); + } + else { + obj.oids = []; + } + return obj; + }, +}; +function createBaseCertificateIdentities() { + return { identities: [] }; +} +exports.CertificateIdentities = { + fromJSON(object) { + return { + identities: Array.isArray(object?.identities) + ? object.identities.map((e) => exports.CertificateIdentity.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.identities) { + obj.identities = message.identities.map((e) => e ? exports.CertificateIdentity.toJSON(e) : undefined); + } + else { + obj.identities = []; + } + return obj; + }, +}; +function createBasePublicKeyIdentities() { + return { publicKeys: [] }; +} +exports.PublicKeyIdentities = { + fromJSON(object) { + return { + publicKeys: Array.isArray(object?.publicKeys) ? object.publicKeys.map((e) => sigstore_common_1.PublicKey.fromJSON(e)) : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.publicKeys) { + obj.publicKeys = message.publicKeys.map((e) => e ? sigstore_common_1.PublicKey.toJSON(e) : undefined); + } + else { + obj.publicKeys = []; + } + return obj; + }, +}; +function createBaseArtifactVerificationOptions() { + return { signers: undefined, tlogOptions: undefined, ctlogOptions: undefined, tsaOptions: undefined }; +} +exports.ArtifactVerificationOptions = { + fromJSON(object) { + return { + signers: isSet(object.certificateIdentities) + ? 
{ + $case: "certificateIdentities", + certificateIdentities: exports.CertificateIdentities.fromJSON(object.certificateIdentities), + } + : isSet(object.publicKeys) + ? { $case: "publicKeys", publicKeys: exports.PublicKeyIdentities.fromJSON(object.publicKeys) } + : undefined, + tlogOptions: isSet(object.tlogOptions) + ? exports.ArtifactVerificationOptions_TlogOptions.fromJSON(object.tlogOptions) + : undefined, + ctlogOptions: isSet(object.ctlogOptions) + ? exports.ArtifactVerificationOptions_CtlogOptions.fromJSON(object.ctlogOptions) + : undefined, + tsaOptions: isSet(object.tsaOptions) + ? exports.ArtifactVerificationOptions_TimestampAuthorityOptions.fromJSON(object.tsaOptions) + : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.signers?.$case === "certificateIdentities" && + (obj.certificateIdentities = message.signers?.certificateIdentities + ? exports.CertificateIdentities.toJSON(message.signers?.certificateIdentities) + : undefined); + message.signers?.$case === "publicKeys" && (obj.publicKeys = message.signers?.publicKeys + ? exports.PublicKeyIdentities.toJSON(message.signers?.publicKeys) + : undefined); + message.tlogOptions !== undefined && (obj.tlogOptions = message.tlogOptions + ? exports.ArtifactVerificationOptions_TlogOptions.toJSON(message.tlogOptions) + : undefined); + message.ctlogOptions !== undefined && (obj.ctlogOptions = message.ctlogOptions + ? exports.ArtifactVerificationOptions_CtlogOptions.toJSON(message.ctlogOptions) + : undefined); + message.tsaOptions !== undefined && (obj.tsaOptions = message.tsaOptions + ? exports.ArtifactVerificationOptions_TimestampAuthorityOptions.toJSON(message.tsaOptions) + : undefined); + return obj; + }, +}; +function createBaseArtifactVerificationOptions_TlogOptions() { + return { threshold: 0, performOnlineVerification: false, disable: false }; +} +exports.ArtifactVerificationOptions_TlogOptions = { + fromJSON(object) { + return { + threshold: isSet(object.threshold) ? Number(object.threshold) : 0, + performOnlineVerification: isSet(object.performOnlineVerification) + ? Boolean(object.performOnlineVerification) + : false, + disable: isSet(object.disable) ? Boolean(object.disable) : false, + }; + }, + toJSON(message) { + const obj = {}; + message.threshold !== undefined && (obj.threshold = Math.round(message.threshold)); + message.performOnlineVerification !== undefined && + (obj.performOnlineVerification = message.performOnlineVerification); + message.disable !== undefined && (obj.disable = message.disable); + return obj; + }, +}; +function createBaseArtifactVerificationOptions_CtlogOptions() { + return { threshold: 0, detachedSct: false, disable: false }; +} +exports.ArtifactVerificationOptions_CtlogOptions = { + fromJSON(object) { + return { + threshold: isSet(object.threshold) ? Number(object.threshold) : 0, + detachedSct: isSet(object.detachedSct) ? Boolean(object.detachedSct) : false, + disable: isSet(object.disable) ? Boolean(object.disable) : false, + }; + }, + toJSON(message) { + const obj = {}; + message.threshold !== undefined && (obj.threshold = Math.round(message.threshold)); + message.detachedSct !== undefined && (obj.detachedSct = message.detachedSct); + message.disable !== undefined && (obj.disable = message.disable); + return obj; + }, +}; +function createBaseArtifactVerificationOptions_TimestampAuthorityOptions() { + return { threshold: 0, disable: false }; +} +exports.ArtifactVerificationOptions_TimestampAuthorityOptions = { + fromJSON(object) { + return { + threshold: isSet(object.threshold) ? 
Number(object.threshold) : 0, + disable: isSet(object.disable) ? Boolean(object.disable) : false, + }; + }, + toJSON(message) { + const obj = {}; + message.threshold !== undefined && (obj.threshold = Math.round(message.threshold)); + message.disable !== undefined && (obj.disable = message.disable); + return obj; + }, +}; +function createBaseArtifact() { + return { data: undefined }; +} +exports.Artifact = { + fromJSON(object) { + return { + data: isSet(object.artifactUri) + ? { $case: "artifactUri", artifactUri: String(object.artifactUri) } + : isSet(object.artifact) + ? { $case: "artifact", artifact: Buffer.from(bytesFromBase64(object.artifact)) } + : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.data?.$case === "artifactUri" && (obj.artifactUri = message.data?.artifactUri); + message.data?.$case === "artifact" && + (obj.artifact = message.data?.artifact !== undefined ? base64FromBytes(message.data?.artifact) : undefined); + return obj; + }, +}; +function createBaseInput() { + return { + artifactTrustRoot: undefined, + artifactVerificationOptions: undefined, + bundle: undefined, + artifact: undefined, + }; +} +exports.Input = { + fromJSON(object) { + return { + artifactTrustRoot: isSet(object.artifactTrustRoot) ? sigstore_trustroot_1.TrustedRoot.fromJSON(object.artifactTrustRoot) : undefined, + artifactVerificationOptions: isSet(object.artifactVerificationOptions) + ? exports.ArtifactVerificationOptions.fromJSON(object.artifactVerificationOptions) + : undefined, + bundle: isSet(object.bundle) ? sigstore_bundle_1.Bundle.fromJSON(object.bundle) : undefined, + artifact: isSet(object.artifact) ? exports.Artifact.fromJSON(object.artifact) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.artifactTrustRoot !== undefined && + (obj.artifactTrustRoot = message.artifactTrustRoot ? sigstore_trustroot_1.TrustedRoot.toJSON(message.artifactTrustRoot) : undefined); + message.artifactVerificationOptions !== undefined && + (obj.artifactVerificationOptions = message.artifactVerificationOptions + ? exports.ArtifactVerificationOptions.toJSON(message.artifactVerificationOptions) + : undefined); + message.bundle !== undefined && (obj.bundle = message.bundle ? sigstore_bundle_1.Bundle.toJSON(message.bundle) : undefined); + message.artifact !== undefined && (obj.artifact = message.artifact ? 
exports.Artifact.toJSON(message.artifact) : undefined); + return obj; + }, +}; +var tsProtoGlobalThis = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); +function bytesFromBase64(b64) { + if (tsProtoGlobalThis.Buffer) { + return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64")); + } + else { + const bin = tsProtoGlobalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} +function base64FromBytes(arr) { + if (tsProtoGlobalThis.Buffer) { + return tsProtoGlobalThis.Buffer.from(arr).toString("base64"); + } + else { + const bin = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return tsProtoGlobalThis.btoa(bin.join("")); + } +} +function isSet(value) { + return value !== null && value !== undefined; +} diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/index.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/index.js new file mode 100644 index 0000000000000..eafb768c48fca --- /dev/null +++ b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/index.js @@ -0,0 +1,37 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +__exportStar(require("./__generated__/envelope"), exports); +__exportStar(require("./__generated__/sigstore_bundle"), exports); +__exportStar(require("./__generated__/sigstore_common"), exports); +__exportStar(require("./__generated__/sigstore_rekor"), exports); +__exportStar(require("./__generated__/sigstore_trustroot"), exports); +__exportStar(require("./__generated__/sigstore_verification"), exports); diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/package.json b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/package.json new file mode 100644 index 0000000000000..450abb157f31a --- /dev/null +++ b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/package.json @@ -0,0 +1,31 @@ +{ + "name": "@sigstore/protobuf-specs", + "version": "0.2.1", + "description": "code-signing for npm packages", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "scripts": { + "build": "tsc" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/sigstore/protobuf-specs.git" + }, + "files": [ + "dist" + ], + "author": "bdehamer@github.com", + "license": "Apache-2.0", + "bugs": { + "url": "https://github.com/sigstore/protobuf-specs/issues" + }, + "homepage": "https://github.com/sigstore/protobuf-specs#readme", + "devDependencies": { + "@tsconfig/node14": "^1.0.3", + "@types/node": "^18.14.0", + "typescript": "^4.9.5" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } +} diff --git a/node_modules/@sigstore/sign/package.json b/node_modules/@sigstore/sign/package.json new file mode 100644 index 0000000000000..732c94f9fcd49 --- /dev/null +++ b/node_modules/@sigstore/sign/package.json @@ -0,0 +1,42 @@ +{ + "name": "@sigstore/sign", + "version": "2.0.0", + "description": "Sigstore signing library", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "scripts": { + "clean": "shx rm -rf dist *.tsbuildinfo", + "build": "tsc --build", + "test": "jest" + }, + "files": [ + "dist" + ], + "author": "bdehamer@github.com", + "license": "Apache-2.0", + "repository": { + "type": "git", + "url": "git+https://github.com/sigstore/sigstore-js.git" + }, + "bugs": { + "url": "https://github.com/sigstore/sigstore-js/issues" + }, + "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/sign#readme", + "publishConfig": { + "provenance": true + }, + "devDependencies": { + "@sigstore/jest": "^0.0.0", + "@sigstore/mock": "^0.3.0", + "@sigstore/rekor-types": "^2.0.0", + "@types/make-fetch-happen": "^10.0.0" + }, + "dependencies": { + "@sigstore/bundle": "^2.0.0", + "@sigstore/protobuf-specs": "^0.2.1", + "make-fetch-happen": "^13.0.0" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } +} diff --git a/node_modules/@sigstore/tuf/dist/client.js b/node_modules/@sigstore/tuf/dist/client.js index 08d6b61840909..797346d39e620 100644 --- a/node_modules/@sigstore/tuf/dist/client.js +++ b/node_modules/@sigstore/tuf/dist/client.js @@ -76,21 +76,8 @@ function initClient(cachePath, remote, options) { const baseURL = remote.mirror; const config = { fetchTimeout: options.timeout, + fetchRetry: options.retry, }; - // tuf-js only supports a number for fetchRetries so we have to - // convert the boolean and object options to a number. 
- /* istanbul ignore if */ - if (typeof options.retry !== 'undefined') { - if (typeof options.retry === 'number') { - config.fetchRetries = options.retry; - } - else if (typeof options.retry === 'object') { - config.fetchRetries = options.retry.retries; - } - else if (options.retry === true) { - config.fetchRetries = 1; - } - } return new tuf_js_1.Updater({ metadataBaseUrl: baseURL, targetBaseUrl: `${baseURL}/targets`, diff --git a/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/LICENSE b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/LICENSE new file mode 100644 index 0000000000000..e9e7c1679a09d --- /dev/null +++ b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2023 The Sigstore Authors + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js new file mode 100644 index 0000000000000..0c367a8384454 --- /dev/null +++ b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js @@ -0,0 +1,89 @@ +"use strict"; +/* eslint-disable */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Signature = exports.Envelope = void 0; +function createBaseEnvelope() { + return { payload: Buffer.alloc(0), payloadType: "", signatures: [] }; +} +exports.Envelope = { + fromJSON(object) { + return { + payload: isSet(object.payload) ? Buffer.from(bytesFromBase64(object.payload)) : Buffer.alloc(0), + payloadType: isSet(object.payloadType) ? String(object.payloadType) : "", + signatures: Array.isArray(object?.signatures) ? object.signatures.map((e) => exports.Signature.fromJSON(e)) : [], + }; + }, + toJSON(message) { + const obj = {}; + message.payload !== undefined && + (obj.payload = base64FromBytes(message.payload !== undefined ? message.payload : Buffer.alloc(0))); + message.payloadType !== undefined && (obj.payloadType = message.payloadType); + if (message.signatures) { + obj.signatures = message.signatures.map((e) => e ? exports.Signature.toJSON(e) : undefined); + } + else { + obj.signatures = []; + } + return obj; + }, +}; +function createBaseSignature() { + return { sig: Buffer.alloc(0), keyid: "" }; +} +exports.Signature = { + fromJSON(object) { + return { + sig: isSet(object.sig) ? Buffer.from(bytesFromBase64(object.sig)) : Buffer.alloc(0), + keyid: isSet(object.keyid) ? String(object.keyid) : "", + }; + }, + toJSON(message) { + const obj = {}; + message.sig !== undefined && (obj.sig = base64FromBytes(message.sig !== undefined ? 
message.sig : Buffer.alloc(0))); + message.keyid !== undefined && (obj.keyid = message.keyid); + return obj; + }, +}; +var tsProtoGlobalThis = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); +function bytesFromBase64(b64) { + if (tsProtoGlobalThis.Buffer) { + return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64")); + } + else { + const bin = tsProtoGlobalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} +function base64FromBytes(arr) { + if (tsProtoGlobalThis.Buffer) { + return tsProtoGlobalThis.Buffer.from(arr).toString("base64"); + } + else { + const bin = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return tsProtoGlobalThis.btoa(bin.join("")); + } +} +function isSet(value) { + return value !== null && value !== undefined; +} diff --git a/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js new file mode 100644 index 0000000000000..073093b8371a8 --- /dev/null +++ b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js @@ -0,0 +1,185 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.CloudEventBatch = exports.CloudEvent_CloudEventAttributeValue = exports.CloudEvent_AttributesEntry = exports.CloudEvent = void 0; +/* eslint-disable */ +const any_1 = require("./google/protobuf/any"); +const timestamp_1 = require("./google/protobuf/timestamp"); +function createBaseCloudEvent() { + return { id: "", source: "", specVersion: "", type: "", attributes: {}, data: undefined }; +} +exports.CloudEvent = { + fromJSON(object) { + return { + id: isSet(object.id) ? String(object.id) : "", + source: isSet(object.source) ? String(object.source) : "", + specVersion: isSet(object.specVersion) ? String(object.specVersion) : "", + type: isSet(object.type) ? String(object.type) : "", + attributes: isObject(object.attributes) + ? Object.entries(object.attributes).reduce((acc, [key, value]) => { + acc[key] = exports.CloudEvent_CloudEventAttributeValue.fromJSON(value); + return acc; + }, {}) + : {}, + data: isSet(object.binaryData) + ? { $case: "binaryData", binaryData: Buffer.from(bytesFromBase64(object.binaryData)) } + : isSet(object.textData) + ? { $case: "textData", textData: String(object.textData) } + : isSet(object.protoData) + ? { $case: "protoData", protoData: any_1.Any.fromJSON(object.protoData) } + : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.id !== undefined && (obj.id = message.id); + message.source !== undefined && (obj.source = message.source); + message.specVersion !== undefined && (obj.specVersion = message.specVersion); + message.type !== undefined && (obj.type = message.type); + obj.attributes = {}; + if (message.attributes) { + Object.entries(message.attributes).forEach(([k, v]) => { + obj.attributes[k] = exports.CloudEvent_CloudEventAttributeValue.toJSON(v); + }); + } + message.data?.$case === "binaryData" && + (obj.binaryData = message.data?.binaryData !== undefined ? 
base64FromBytes(message.data?.binaryData) : undefined); + message.data?.$case === "textData" && (obj.textData = message.data?.textData); + message.data?.$case === "protoData" && + (obj.protoData = message.data?.protoData ? any_1.Any.toJSON(message.data?.protoData) : undefined); + return obj; + }, +}; +function createBaseCloudEvent_AttributesEntry() { + return { key: "", value: undefined }; +} +exports.CloudEvent_AttributesEntry = { + fromJSON(object) { + return { + key: isSet(object.key) ? String(object.key) : "", + value: isSet(object.value) ? exports.CloudEvent_CloudEventAttributeValue.fromJSON(object.value) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.key !== undefined && (obj.key = message.key); + message.value !== undefined && + (obj.value = message.value ? exports.CloudEvent_CloudEventAttributeValue.toJSON(message.value) : undefined); + return obj; + }, +}; +function createBaseCloudEvent_CloudEventAttributeValue() { + return { attr: undefined }; +} +exports.CloudEvent_CloudEventAttributeValue = { + fromJSON(object) { + return { + attr: isSet(object.ceBoolean) + ? { $case: "ceBoolean", ceBoolean: Boolean(object.ceBoolean) } + : isSet(object.ceInteger) + ? { $case: "ceInteger", ceInteger: Number(object.ceInteger) } + : isSet(object.ceString) + ? { $case: "ceString", ceString: String(object.ceString) } + : isSet(object.ceBytes) + ? { $case: "ceBytes", ceBytes: Buffer.from(bytesFromBase64(object.ceBytes)) } + : isSet(object.ceUri) + ? { $case: "ceUri", ceUri: String(object.ceUri) } + : isSet(object.ceUriRef) + ? { $case: "ceUriRef", ceUriRef: String(object.ceUriRef) } + : isSet(object.ceTimestamp) + ? { $case: "ceTimestamp", ceTimestamp: fromJsonTimestamp(object.ceTimestamp) } + : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.attr?.$case === "ceBoolean" && (obj.ceBoolean = message.attr?.ceBoolean); + message.attr?.$case === "ceInteger" && (obj.ceInteger = Math.round(message.attr?.ceInteger)); + message.attr?.$case === "ceString" && (obj.ceString = message.attr?.ceString); + message.attr?.$case === "ceBytes" && + (obj.ceBytes = message.attr?.ceBytes !== undefined ? base64FromBytes(message.attr?.ceBytes) : undefined); + message.attr?.$case === "ceUri" && (obj.ceUri = message.attr?.ceUri); + message.attr?.$case === "ceUriRef" && (obj.ceUriRef = message.attr?.ceUriRef); + message.attr?.$case === "ceTimestamp" && (obj.ceTimestamp = message.attr?.ceTimestamp.toISOString()); + return obj; + }, +}; +function createBaseCloudEventBatch() { + return { events: [] }; +} +exports.CloudEventBatch = { + fromJSON(object) { + return { events: Array.isArray(object?.events) ? object.events.map((e) => exports.CloudEvent.fromJSON(e)) : [] }; + }, + toJSON(message) { + const obj = {}; + if (message.events) { + obj.events = message.events.map((e) => e ? 
exports.CloudEvent.toJSON(e) : undefined); + } + else { + obj.events = []; + } + return obj; + }, +}; +var tsProtoGlobalThis = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); +function bytesFromBase64(b64) { + if (tsProtoGlobalThis.Buffer) { + return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64")); + } + else { + const bin = tsProtoGlobalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} +function base64FromBytes(arr) { + if (tsProtoGlobalThis.Buffer) { + return tsProtoGlobalThis.Buffer.from(arr).toString("base64"); + } + else { + const bin = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return tsProtoGlobalThis.btoa(bin.join("")); + } +} +function fromTimestamp(t) { + let millis = Number(t.seconds) * 1000; + millis += t.nanos / 1000000; + return new Date(millis); +} +function fromJsonTimestamp(o) { + if (o instanceof Date) { + return o; + } + else if (typeof o === "string") { + return new Date(o); + } + else { + return fromTimestamp(timestamp_1.Timestamp.fromJSON(o)); + } +} +function isObject(value) { + return typeof value === "object" && value !== null; +} +function isSet(value) { + return value !== null && value !== undefined; +} diff --git a/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js new file mode 100644 index 0000000000000..da627499ad765 --- /dev/null +++ b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js @@ -0,0 +1,119 @@ +"use strict"; +/* eslint-disable */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.fieldBehaviorToJSON = exports.fieldBehaviorFromJSON = exports.FieldBehavior = void 0; +/** + * An indicator of the behavior of a given field (for example, that a field + * is required in requests, or given as output but ignored as input). + * This **does not** change the behavior in protocol buffers itself; it only + * denotes the behavior and may affect how API tooling handles the field. + * + * Note: This enum **may** receive new values in the future. + */ +var FieldBehavior; +(function (FieldBehavior) { + /** FIELD_BEHAVIOR_UNSPECIFIED - Conventional default for enums. Do not use this. */ + FieldBehavior[FieldBehavior["FIELD_BEHAVIOR_UNSPECIFIED"] = 0] = "FIELD_BEHAVIOR_UNSPECIFIED"; + /** + * OPTIONAL - Specifically denotes a field as optional. + * While all fields in protocol buffers are optional, this may be specified + * for emphasis if appropriate. + */ + FieldBehavior[FieldBehavior["OPTIONAL"] = 1] = "OPTIONAL"; + /** + * REQUIRED - Denotes a field as required. + * This indicates that the field **must** be provided as part of the request, + * and failure to do so will cause an error (usually `INVALID_ARGUMENT`). + */ + FieldBehavior[FieldBehavior["REQUIRED"] = 2] = "REQUIRED"; + /** + * OUTPUT_ONLY - Denotes a field as output only. 
+ * This indicates that the field is provided in responses, but including the + * field in a request does nothing (the server *must* ignore it and + * *must not* throw an error as a result of the field's presence). + */ + FieldBehavior[FieldBehavior["OUTPUT_ONLY"] = 3] = "OUTPUT_ONLY"; + /** + * INPUT_ONLY - Denotes a field as input only. + * This indicates that the field is provided in requests, and the + * corresponding field is not included in output. + */ + FieldBehavior[FieldBehavior["INPUT_ONLY"] = 4] = "INPUT_ONLY"; + /** + * IMMUTABLE - Denotes a field as immutable. + * This indicates that the field may be set once in a request to create a + * resource, but may not be changed thereafter. + */ + FieldBehavior[FieldBehavior["IMMUTABLE"] = 5] = "IMMUTABLE"; + /** + * UNORDERED_LIST - Denotes that a (repeated) field is an unordered list. + * This indicates that the service may provide the elements of the list + * in any arbitrary order, rather than the order the user originally + * provided. Additionally, the list's order may or may not be stable. + */ + FieldBehavior[FieldBehavior["UNORDERED_LIST"] = 6] = "UNORDERED_LIST"; +})(FieldBehavior = exports.FieldBehavior || (exports.FieldBehavior = {})); +function fieldBehaviorFromJSON(object) { + switch (object) { + case 0: + case "FIELD_BEHAVIOR_UNSPECIFIED": + return FieldBehavior.FIELD_BEHAVIOR_UNSPECIFIED; + case 1: + case "OPTIONAL": + return FieldBehavior.OPTIONAL; + case 2: + case "REQUIRED": + return FieldBehavior.REQUIRED; + case 3: + case "OUTPUT_ONLY": + return FieldBehavior.OUTPUT_ONLY; + case 4: + case "INPUT_ONLY": + return FieldBehavior.INPUT_ONLY; + case 5: + case "IMMUTABLE": + return FieldBehavior.IMMUTABLE; + case 6: + case "UNORDERED_LIST": + return FieldBehavior.UNORDERED_LIST; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior"); + } +} +exports.fieldBehaviorFromJSON = fieldBehaviorFromJSON; +function fieldBehaviorToJSON(object) { + switch (object) { + case FieldBehavior.FIELD_BEHAVIOR_UNSPECIFIED: + return "FIELD_BEHAVIOR_UNSPECIFIED"; + case FieldBehavior.OPTIONAL: + return "OPTIONAL"; + case FieldBehavior.REQUIRED: + return "REQUIRED"; + case FieldBehavior.OUTPUT_ONLY: + return "OUTPUT_ONLY"; + case FieldBehavior.INPUT_ONLY: + return "INPUT_ONLY"; + case FieldBehavior.IMMUTABLE: + return "IMMUTABLE"; + case FieldBehavior.UNORDERED_LIST: + return "UNORDERED_LIST"; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior"); + } +} +exports.fieldBehaviorToJSON = fieldBehaviorToJSON; +var tsProtoGlobalThis = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); diff --git a/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js new file mode 100644 index 0000000000000..6b3f3c97a6647 --- /dev/null +++ b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js @@ -0,0 +1,65 @@ +"use strict"; +/* eslint-disable */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Any = void 0; +function createBaseAny() { + return { typeUrl: "", value: 
Buffer.alloc(0) }; +} +exports.Any = { + fromJSON(object) { + return { + typeUrl: isSet(object.typeUrl) ? String(object.typeUrl) : "", + value: isSet(object.value) ? Buffer.from(bytesFromBase64(object.value)) : Buffer.alloc(0), + }; + }, + toJSON(message) { + const obj = {}; + message.typeUrl !== undefined && (obj.typeUrl = message.typeUrl); + message.value !== undefined && + (obj.value = base64FromBytes(message.value !== undefined ? message.value : Buffer.alloc(0))); + return obj; + }, +}; +var tsProtoGlobalThis = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); +function bytesFromBase64(b64) { + if (tsProtoGlobalThis.Buffer) { + return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64")); + } + else { + const bin = tsProtoGlobalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} +function base64FromBytes(arr) { + if (tsProtoGlobalThis.Buffer) { + return tsProtoGlobalThis.Buffer.from(arr).toString("base64"); + } + else { + const bin = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return tsProtoGlobalThis.btoa(bin.join("")); + } +} +function isSet(value) { + return value !== null && value !== undefined; +} diff --git a/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js new file mode 100644 index 0000000000000..d429aac846043 --- /dev/null +++ b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js @@ -0,0 +1,1308 @@ +"use strict"; +/* eslint-disable */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.GeneratedCodeInfo_Annotation = exports.GeneratedCodeInfo = exports.SourceCodeInfo_Location = exports.SourceCodeInfo = exports.UninterpretedOption_NamePart = exports.UninterpretedOption = exports.MethodOptions = exports.ServiceOptions = exports.EnumValueOptions = exports.EnumOptions = exports.OneofOptions = exports.FieldOptions = exports.MessageOptions = exports.FileOptions = exports.MethodDescriptorProto = exports.ServiceDescriptorProto = exports.EnumValueDescriptorProto = exports.EnumDescriptorProto_EnumReservedRange = exports.EnumDescriptorProto = exports.OneofDescriptorProto = exports.FieldDescriptorProto = exports.ExtensionRangeOptions = exports.DescriptorProto_ReservedRange = exports.DescriptorProto_ExtensionRange = exports.DescriptorProto = exports.FileDescriptorProto = exports.FileDescriptorSet = exports.methodOptions_IdempotencyLevelToJSON = exports.methodOptions_IdempotencyLevelFromJSON = exports.MethodOptions_IdempotencyLevel = exports.fieldOptions_JSTypeToJSON = exports.fieldOptions_JSTypeFromJSON = exports.FieldOptions_JSType = exports.fieldOptions_CTypeToJSON = exports.fieldOptions_CTypeFromJSON = exports.FieldOptions_CType = exports.fileOptions_OptimizeModeToJSON = exports.fileOptions_OptimizeModeFromJSON = exports.FileOptions_OptimizeMode = exports.fieldDescriptorProto_LabelToJSON = exports.fieldDescriptorProto_LabelFromJSON = exports.FieldDescriptorProto_Label = exports.fieldDescriptorProto_TypeToJSON = 
exports.fieldDescriptorProto_TypeFromJSON = exports.FieldDescriptorProto_Type = void 0; +var FieldDescriptorProto_Type; +(function (FieldDescriptorProto_Type) { + /** + * TYPE_DOUBLE - 0 is reserved for errors. + * Order is weird for historical reasons. + */ + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_DOUBLE"] = 1] = "TYPE_DOUBLE"; + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FLOAT"] = 2] = "TYPE_FLOAT"; + /** + * TYPE_INT64 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if + * negative values are likely. + */ + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_INT64"] = 3] = "TYPE_INT64"; + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_UINT64"] = 4] = "TYPE_UINT64"; + /** + * TYPE_INT32 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if + * negative values are likely. + */ + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_INT32"] = 5] = "TYPE_INT32"; + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FIXED64"] = 6] = "TYPE_FIXED64"; + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FIXED32"] = 7] = "TYPE_FIXED32"; + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_BOOL"] = 8] = "TYPE_BOOL"; + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_STRING"] = 9] = "TYPE_STRING"; + /** + * TYPE_GROUP - Tag-delimited aggregate. + * Group type is deprecated and not supported in proto3. However, Proto3 + * implementations should still be able to parse the group wire format and + * treat group fields as unknown fields. + */ + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_GROUP"] = 10] = "TYPE_GROUP"; + /** TYPE_MESSAGE - Length-delimited aggregate. */ + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_MESSAGE"] = 11] = "TYPE_MESSAGE"; + /** TYPE_BYTES - New in version 2. */ + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_BYTES"] = 12] = "TYPE_BYTES"; + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_UINT32"] = 13] = "TYPE_UINT32"; + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_ENUM"] = 14] = "TYPE_ENUM"; + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SFIXED32"] = 15] = "TYPE_SFIXED32"; + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SFIXED64"] = 16] = "TYPE_SFIXED64"; + /** TYPE_SINT32 - Uses ZigZag encoding. */ + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SINT32"] = 17] = "TYPE_SINT32"; + /** TYPE_SINT64 - Uses ZigZag encoding. 
*/ + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SINT64"] = 18] = "TYPE_SINT64"; +})(FieldDescriptorProto_Type = exports.FieldDescriptorProto_Type || (exports.FieldDescriptorProto_Type = {})); +function fieldDescriptorProto_TypeFromJSON(object) { + switch (object) { + case 1: + case "TYPE_DOUBLE": + return FieldDescriptorProto_Type.TYPE_DOUBLE; + case 2: + case "TYPE_FLOAT": + return FieldDescriptorProto_Type.TYPE_FLOAT; + case 3: + case "TYPE_INT64": + return FieldDescriptorProto_Type.TYPE_INT64; + case 4: + case "TYPE_UINT64": + return FieldDescriptorProto_Type.TYPE_UINT64; + case 5: + case "TYPE_INT32": + return FieldDescriptorProto_Type.TYPE_INT32; + case 6: + case "TYPE_FIXED64": + return FieldDescriptorProto_Type.TYPE_FIXED64; + case 7: + case "TYPE_FIXED32": + return FieldDescriptorProto_Type.TYPE_FIXED32; + case 8: + case "TYPE_BOOL": + return FieldDescriptorProto_Type.TYPE_BOOL; + case 9: + case "TYPE_STRING": + return FieldDescriptorProto_Type.TYPE_STRING; + case 10: + case "TYPE_GROUP": + return FieldDescriptorProto_Type.TYPE_GROUP; + case 11: + case "TYPE_MESSAGE": + return FieldDescriptorProto_Type.TYPE_MESSAGE; + case 12: + case "TYPE_BYTES": + return FieldDescriptorProto_Type.TYPE_BYTES; + case 13: + case "TYPE_UINT32": + return FieldDescriptorProto_Type.TYPE_UINT32; + case 14: + case "TYPE_ENUM": + return FieldDescriptorProto_Type.TYPE_ENUM; + case 15: + case "TYPE_SFIXED32": + return FieldDescriptorProto_Type.TYPE_SFIXED32; + case 16: + case "TYPE_SFIXED64": + return FieldDescriptorProto_Type.TYPE_SFIXED64; + case 17: + case "TYPE_SINT32": + return FieldDescriptorProto_Type.TYPE_SINT32; + case 18: + case "TYPE_SINT64": + return FieldDescriptorProto_Type.TYPE_SINT64; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type"); + } +} +exports.fieldDescriptorProto_TypeFromJSON = fieldDescriptorProto_TypeFromJSON; +function fieldDescriptorProto_TypeToJSON(object) { + switch (object) { + case FieldDescriptorProto_Type.TYPE_DOUBLE: + return "TYPE_DOUBLE"; + case FieldDescriptorProto_Type.TYPE_FLOAT: + return "TYPE_FLOAT"; + case FieldDescriptorProto_Type.TYPE_INT64: + return "TYPE_INT64"; + case FieldDescriptorProto_Type.TYPE_UINT64: + return "TYPE_UINT64"; + case FieldDescriptorProto_Type.TYPE_INT32: + return "TYPE_INT32"; + case FieldDescriptorProto_Type.TYPE_FIXED64: + return "TYPE_FIXED64"; + case FieldDescriptorProto_Type.TYPE_FIXED32: + return "TYPE_FIXED32"; + case FieldDescriptorProto_Type.TYPE_BOOL: + return "TYPE_BOOL"; + case FieldDescriptorProto_Type.TYPE_STRING: + return "TYPE_STRING"; + case FieldDescriptorProto_Type.TYPE_GROUP: + return "TYPE_GROUP"; + case FieldDescriptorProto_Type.TYPE_MESSAGE: + return "TYPE_MESSAGE"; + case FieldDescriptorProto_Type.TYPE_BYTES: + return "TYPE_BYTES"; + case FieldDescriptorProto_Type.TYPE_UINT32: + return "TYPE_UINT32"; + case FieldDescriptorProto_Type.TYPE_ENUM: + return "TYPE_ENUM"; + case FieldDescriptorProto_Type.TYPE_SFIXED32: + return "TYPE_SFIXED32"; + case FieldDescriptorProto_Type.TYPE_SFIXED64: + return "TYPE_SFIXED64"; + case FieldDescriptorProto_Type.TYPE_SINT32: + return "TYPE_SINT32"; + case FieldDescriptorProto_Type.TYPE_SINT64: + return "TYPE_SINT64"; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type"); + } +} +exports.fieldDescriptorProto_TypeToJSON = fieldDescriptorProto_TypeToJSON; +var FieldDescriptorProto_Label; +(function (FieldDescriptorProto_Label) { + /** 
LABEL_OPTIONAL - 0 is reserved for errors */ + FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_OPTIONAL"] = 1] = "LABEL_OPTIONAL"; + FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_REQUIRED"] = 2] = "LABEL_REQUIRED"; + FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_REPEATED"] = 3] = "LABEL_REPEATED"; +})(FieldDescriptorProto_Label = exports.FieldDescriptorProto_Label || (exports.FieldDescriptorProto_Label = {})); +function fieldDescriptorProto_LabelFromJSON(object) { + switch (object) { + case 1: + case "LABEL_OPTIONAL": + return FieldDescriptorProto_Label.LABEL_OPTIONAL; + case 2: + case "LABEL_REQUIRED": + return FieldDescriptorProto_Label.LABEL_REQUIRED; + case 3: + case "LABEL_REPEATED": + return FieldDescriptorProto_Label.LABEL_REPEATED; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label"); + } +} +exports.fieldDescriptorProto_LabelFromJSON = fieldDescriptorProto_LabelFromJSON; +function fieldDescriptorProto_LabelToJSON(object) { + switch (object) { + case FieldDescriptorProto_Label.LABEL_OPTIONAL: + return "LABEL_OPTIONAL"; + case FieldDescriptorProto_Label.LABEL_REQUIRED: + return "LABEL_REQUIRED"; + case FieldDescriptorProto_Label.LABEL_REPEATED: + return "LABEL_REPEATED"; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label"); + } +} +exports.fieldDescriptorProto_LabelToJSON = fieldDescriptorProto_LabelToJSON; +/** Generated classes can be optimized for speed or code size. */ +var FileOptions_OptimizeMode; +(function (FileOptions_OptimizeMode) { + /** SPEED - Generate complete code for parsing, serialization, */ + FileOptions_OptimizeMode[FileOptions_OptimizeMode["SPEED"] = 1] = "SPEED"; + /** CODE_SIZE - etc. */ + FileOptions_OptimizeMode[FileOptions_OptimizeMode["CODE_SIZE"] = 2] = "CODE_SIZE"; + /** LITE_RUNTIME - Generate code using MessageLite and the lite runtime. */ + FileOptions_OptimizeMode[FileOptions_OptimizeMode["LITE_RUNTIME"] = 3] = "LITE_RUNTIME"; +})(FileOptions_OptimizeMode = exports.FileOptions_OptimizeMode || (exports.FileOptions_OptimizeMode = {})); +function fileOptions_OptimizeModeFromJSON(object) { + switch (object) { + case 1: + case "SPEED": + return FileOptions_OptimizeMode.SPEED; + case 2: + case "CODE_SIZE": + return FileOptions_OptimizeMode.CODE_SIZE; + case 3: + case "LITE_RUNTIME": + return FileOptions_OptimizeMode.LITE_RUNTIME; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode"); + } +} +exports.fileOptions_OptimizeModeFromJSON = fileOptions_OptimizeModeFromJSON; +function fileOptions_OptimizeModeToJSON(object) { + switch (object) { + case FileOptions_OptimizeMode.SPEED: + return "SPEED"; + case FileOptions_OptimizeMode.CODE_SIZE: + return "CODE_SIZE"; + case FileOptions_OptimizeMode.LITE_RUNTIME: + return "LITE_RUNTIME"; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode"); + } +} +exports.fileOptions_OptimizeModeToJSON = fileOptions_OptimizeModeToJSON; +var FieldOptions_CType; +(function (FieldOptions_CType) { + /** STRING - Default mode. 
*/ + FieldOptions_CType[FieldOptions_CType["STRING"] = 0] = "STRING"; + FieldOptions_CType[FieldOptions_CType["CORD"] = 1] = "CORD"; + FieldOptions_CType[FieldOptions_CType["STRING_PIECE"] = 2] = "STRING_PIECE"; +})(FieldOptions_CType = exports.FieldOptions_CType || (exports.FieldOptions_CType = {})); +function fieldOptions_CTypeFromJSON(object) { + switch (object) { + case 0: + case "STRING": + return FieldOptions_CType.STRING; + case 1: + case "CORD": + return FieldOptions_CType.CORD; + case 2: + case "STRING_PIECE": + return FieldOptions_CType.STRING_PIECE; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType"); + } +} +exports.fieldOptions_CTypeFromJSON = fieldOptions_CTypeFromJSON; +function fieldOptions_CTypeToJSON(object) { + switch (object) { + case FieldOptions_CType.STRING: + return "STRING"; + case FieldOptions_CType.CORD: + return "CORD"; + case FieldOptions_CType.STRING_PIECE: + return "STRING_PIECE"; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType"); + } +} +exports.fieldOptions_CTypeToJSON = fieldOptions_CTypeToJSON; +var FieldOptions_JSType; +(function (FieldOptions_JSType) { + /** JS_NORMAL - Use the default type. */ + FieldOptions_JSType[FieldOptions_JSType["JS_NORMAL"] = 0] = "JS_NORMAL"; + /** JS_STRING - Use JavaScript strings. */ + FieldOptions_JSType[FieldOptions_JSType["JS_STRING"] = 1] = "JS_STRING"; + /** JS_NUMBER - Use JavaScript numbers. */ + FieldOptions_JSType[FieldOptions_JSType["JS_NUMBER"] = 2] = "JS_NUMBER"; +})(FieldOptions_JSType = exports.FieldOptions_JSType || (exports.FieldOptions_JSType = {})); +function fieldOptions_JSTypeFromJSON(object) { + switch (object) { + case 0: + case "JS_NORMAL": + return FieldOptions_JSType.JS_NORMAL; + case 1: + case "JS_STRING": + return FieldOptions_JSType.JS_STRING; + case 2: + case "JS_NUMBER": + return FieldOptions_JSType.JS_NUMBER; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType"); + } +} +exports.fieldOptions_JSTypeFromJSON = fieldOptions_JSTypeFromJSON; +function fieldOptions_JSTypeToJSON(object) { + switch (object) { + case FieldOptions_JSType.JS_NORMAL: + return "JS_NORMAL"; + case FieldOptions_JSType.JS_STRING: + return "JS_STRING"; + case FieldOptions_JSType.JS_NUMBER: + return "JS_NUMBER"; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType"); + } +} +exports.fieldOptions_JSTypeToJSON = fieldOptions_JSTypeToJSON; +/** + * Is this method side-effect-free (or safe in HTTP parlance), or idempotent, + * or neither? HTTP based RPC implementation may choose GET verb for safe + * methods, and PUT verb for idempotent methods instead of the default POST. 
+ */ +var MethodOptions_IdempotencyLevel; +(function (MethodOptions_IdempotencyLevel) { + MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["IDEMPOTENCY_UNKNOWN"] = 0] = "IDEMPOTENCY_UNKNOWN"; + /** NO_SIDE_EFFECTS - implies idempotent */ + MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["NO_SIDE_EFFECTS"] = 1] = "NO_SIDE_EFFECTS"; + /** IDEMPOTENT - idempotent, but may have side effects */ + MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["IDEMPOTENT"] = 2] = "IDEMPOTENT"; +})(MethodOptions_IdempotencyLevel = exports.MethodOptions_IdempotencyLevel || (exports.MethodOptions_IdempotencyLevel = {})); +function methodOptions_IdempotencyLevelFromJSON(object) { + switch (object) { + case 0: + case "IDEMPOTENCY_UNKNOWN": + return MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN; + case 1: + case "NO_SIDE_EFFECTS": + return MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS; + case 2: + case "IDEMPOTENT": + return MethodOptions_IdempotencyLevel.IDEMPOTENT; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel"); + } +} +exports.methodOptions_IdempotencyLevelFromJSON = methodOptions_IdempotencyLevelFromJSON; +function methodOptions_IdempotencyLevelToJSON(object) { + switch (object) { + case MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN: + return "IDEMPOTENCY_UNKNOWN"; + case MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS: + return "NO_SIDE_EFFECTS"; + case MethodOptions_IdempotencyLevel.IDEMPOTENT: + return "IDEMPOTENT"; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel"); + } +} +exports.methodOptions_IdempotencyLevelToJSON = methodOptions_IdempotencyLevelToJSON; +function createBaseFileDescriptorSet() { + return { file: [] }; +} +exports.FileDescriptorSet = { + fromJSON(object) { + return { file: Array.isArray(object?.file) ? object.file.map((e) => exports.FileDescriptorProto.fromJSON(e)) : [] }; + }, + toJSON(message) { + const obj = {}; + if (message.file) { + obj.file = message.file.map((e) => e ? exports.FileDescriptorProto.toJSON(e) : undefined); + } + else { + obj.file = []; + } + return obj; + }, +}; +function createBaseFileDescriptorProto() { + return { + name: "", + package: "", + dependency: [], + publicDependency: [], + weakDependency: [], + messageType: [], + enumType: [], + service: [], + extension: [], + options: undefined, + sourceCodeInfo: undefined, + syntax: "", + }; +} +exports.FileDescriptorProto = { + fromJSON(object) { + return { + name: isSet(object.name) ? String(object.name) : "", + package: isSet(object.package) ? String(object.package) : "", + dependency: Array.isArray(object?.dependency) ? object.dependency.map((e) => String(e)) : [], + publicDependency: Array.isArray(object?.publicDependency) + ? object.publicDependency.map((e) => Number(e)) + : [], + weakDependency: Array.isArray(object?.weakDependency) ? object.weakDependency.map((e) => Number(e)) : [], + messageType: Array.isArray(object?.messageType) + ? object.messageType.map((e) => exports.DescriptorProto.fromJSON(e)) + : [], + enumType: Array.isArray(object?.enumType) ? object.enumType.map((e) => exports.EnumDescriptorProto.fromJSON(e)) : [], + service: Array.isArray(object?.service) ? object.service.map((e) => exports.ServiceDescriptorProto.fromJSON(e)) : [], + extension: Array.isArray(object?.extension) + ? object.extension.map((e) => exports.FieldDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) ? 
exports.FileOptions.fromJSON(object.options) : undefined, + sourceCodeInfo: isSet(object.sourceCodeInfo) ? exports.SourceCodeInfo.fromJSON(object.sourceCodeInfo) : undefined, + syntax: isSet(object.syntax) ? String(object.syntax) : "", + }; + }, + toJSON(message) { + const obj = {}; + message.name !== undefined && (obj.name = message.name); + message.package !== undefined && (obj.package = message.package); + if (message.dependency) { + obj.dependency = message.dependency.map((e) => e); + } + else { + obj.dependency = []; + } + if (message.publicDependency) { + obj.publicDependency = message.publicDependency.map((e) => Math.round(e)); + } + else { + obj.publicDependency = []; + } + if (message.weakDependency) { + obj.weakDependency = message.weakDependency.map((e) => Math.round(e)); + } + else { + obj.weakDependency = []; + } + if (message.messageType) { + obj.messageType = message.messageType.map((e) => e ? exports.DescriptorProto.toJSON(e) : undefined); + } + else { + obj.messageType = []; + } + if (message.enumType) { + obj.enumType = message.enumType.map((e) => e ? exports.EnumDescriptorProto.toJSON(e) : undefined); + } + else { + obj.enumType = []; + } + if (message.service) { + obj.service = message.service.map((e) => e ? exports.ServiceDescriptorProto.toJSON(e) : undefined); + } + else { + obj.service = []; + } + if (message.extension) { + obj.extension = message.extension.map((e) => e ? exports.FieldDescriptorProto.toJSON(e) : undefined); + } + else { + obj.extension = []; + } + message.options !== undefined && (obj.options = message.options ? exports.FileOptions.toJSON(message.options) : undefined); + message.sourceCodeInfo !== undefined && + (obj.sourceCodeInfo = message.sourceCodeInfo ? exports.SourceCodeInfo.toJSON(message.sourceCodeInfo) : undefined); + message.syntax !== undefined && (obj.syntax = message.syntax); + return obj; + }, +}; +function createBaseDescriptorProto() { + return { + name: "", + field: [], + extension: [], + nestedType: [], + enumType: [], + extensionRange: [], + oneofDecl: [], + options: undefined, + reservedRange: [], + reservedName: [], + }; +} +exports.DescriptorProto = { + fromJSON(object) { + return { + name: isSet(object.name) ? String(object.name) : "", + field: Array.isArray(object?.field) ? object.field.map((e) => exports.FieldDescriptorProto.fromJSON(e)) : [], + extension: Array.isArray(object?.extension) + ? object.extension.map((e) => exports.FieldDescriptorProto.fromJSON(e)) + : [], + nestedType: Array.isArray(object?.nestedType) + ? object.nestedType.map((e) => exports.DescriptorProto.fromJSON(e)) + : [], + enumType: Array.isArray(object?.enumType) ? object.enumType.map((e) => exports.EnumDescriptorProto.fromJSON(e)) : [], + extensionRange: Array.isArray(object?.extensionRange) + ? object.extensionRange.map((e) => exports.DescriptorProto_ExtensionRange.fromJSON(e)) + : [], + oneofDecl: Array.isArray(object?.oneofDecl) + ? object.oneofDecl.map((e) => exports.OneofDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) ? exports.MessageOptions.fromJSON(object.options) : undefined, + reservedRange: Array.isArray(object?.reservedRange) + ? object.reservedRange.map((e) => exports.DescriptorProto_ReservedRange.fromJSON(e)) + : [], + reservedName: Array.isArray(object?.reservedName) ? object.reservedName.map((e) => String(e)) : [], + }; + }, + toJSON(message) { + const obj = {}; + message.name !== undefined && (obj.name = message.name); + if (message.field) { + obj.field = message.field.map((e) => e ? 
exports.FieldDescriptorProto.toJSON(e) : undefined); + } + else { + obj.field = []; + } + if (message.extension) { + obj.extension = message.extension.map((e) => e ? exports.FieldDescriptorProto.toJSON(e) : undefined); + } + else { + obj.extension = []; + } + if (message.nestedType) { + obj.nestedType = message.nestedType.map((e) => e ? exports.DescriptorProto.toJSON(e) : undefined); + } + else { + obj.nestedType = []; + } + if (message.enumType) { + obj.enumType = message.enumType.map((e) => e ? exports.EnumDescriptorProto.toJSON(e) : undefined); + } + else { + obj.enumType = []; + } + if (message.extensionRange) { + obj.extensionRange = message.extensionRange.map((e) => e ? exports.DescriptorProto_ExtensionRange.toJSON(e) : undefined); + } + else { + obj.extensionRange = []; + } + if (message.oneofDecl) { + obj.oneofDecl = message.oneofDecl.map((e) => e ? exports.OneofDescriptorProto.toJSON(e) : undefined); + } + else { + obj.oneofDecl = []; + } + message.options !== undefined && + (obj.options = message.options ? exports.MessageOptions.toJSON(message.options) : undefined); + if (message.reservedRange) { + obj.reservedRange = message.reservedRange.map((e) => e ? exports.DescriptorProto_ReservedRange.toJSON(e) : undefined); + } + else { + obj.reservedRange = []; + } + if (message.reservedName) { + obj.reservedName = message.reservedName.map((e) => e); + } + else { + obj.reservedName = []; + } + return obj; + }, +}; +function createBaseDescriptorProto_ExtensionRange() { + return { start: 0, end: 0, options: undefined }; +} +exports.DescriptorProto_ExtensionRange = { + fromJSON(object) { + return { + start: isSet(object.start) ? Number(object.start) : 0, + end: isSet(object.end) ? Number(object.end) : 0, + options: isSet(object.options) ? exports.ExtensionRangeOptions.fromJSON(object.options) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.start !== undefined && (obj.start = Math.round(message.start)); + message.end !== undefined && (obj.end = Math.round(message.end)); + message.options !== undefined && + (obj.options = message.options ? exports.ExtensionRangeOptions.toJSON(message.options) : undefined); + return obj; + }, +}; +function createBaseDescriptorProto_ReservedRange() { + return { start: 0, end: 0 }; +} +exports.DescriptorProto_ReservedRange = { + fromJSON(object) { + return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? Number(object.end) : 0 }; + }, + toJSON(message) { + const obj = {}; + message.start !== undefined && (obj.start = Math.round(message.start)); + message.end !== undefined && (obj.end = Math.round(message.end)); + return obj; + }, +}; +function createBaseExtensionRangeOptions() { + return { uninterpretedOption: [] }; +} +exports.ExtensionRangeOptions = { + fromJSON(object) { + return { + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? 
exports.UninterpretedOption.toJSON(e) : undefined); + } + else { + obj.uninterpretedOption = []; + } + return obj; + }, +}; +function createBaseFieldDescriptorProto() { + return { + name: "", + number: 0, + label: 1, + type: 1, + typeName: "", + extendee: "", + defaultValue: "", + oneofIndex: 0, + jsonName: "", + options: undefined, + proto3Optional: false, + }; +} +exports.FieldDescriptorProto = { + fromJSON(object) { + return { + name: isSet(object.name) ? String(object.name) : "", + number: isSet(object.number) ? Number(object.number) : 0, + label: isSet(object.label) ? fieldDescriptorProto_LabelFromJSON(object.label) : 1, + type: isSet(object.type) ? fieldDescriptorProto_TypeFromJSON(object.type) : 1, + typeName: isSet(object.typeName) ? String(object.typeName) : "", + extendee: isSet(object.extendee) ? String(object.extendee) : "", + defaultValue: isSet(object.defaultValue) ? String(object.defaultValue) : "", + oneofIndex: isSet(object.oneofIndex) ? Number(object.oneofIndex) : 0, + jsonName: isSet(object.jsonName) ? String(object.jsonName) : "", + options: isSet(object.options) ? exports.FieldOptions.fromJSON(object.options) : undefined, + proto3Optional: isSet(object.proto3Optional) ? Boolean(object.proto3Optional) : false, + }; + }, + toJSON(message) { + const obj = {}; + message.name !== undefined && (obj.name = message.name); + message.number !== undefined && (obj.number = Math.round(message.number)); + message.label !== undefined && (obj.label = fieldDescriptorProto_LabelToJSON(message.label)); + message.type !== undefined && (obj.type = fieldDescriptorProto_TypeToJSON(message.type)); + message.typeName !== undefined && (obj.typeName = message.typeName); + message.extendee !== undefined && (obj.extendee = message.extendee); + message.defaultValue !== undefined && (obj.defaultValue = message.defaultValue); + message.oneofIndex !== undefined && (obj.oneofIndex = Math.round(message.oneofIndex)); + message.jsonName !== undefined && (obj.jsonName = message.jsonName); + message.options !== undefined && (obj.options = message.options ? exports.FieldOptions.toJSON(message.options) : undefined); + message.proto3Optional !== undefined && (obj.proto3Optional = message.proto3Optional); + return obj; + }, +}; +function createBaseOneofDescriptorProto() { + return { name: "", options: undefined }; +} +exports.OneofDescriptorProto = { + fromJSON(object) { + return { + name: isSet(object.name) ? String(object.name) : "", + options: isSet(object.options) ? exports.OneofOptions.fromJSON(object.options) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.name !== undefined && (obj.name = message.name); + message.options !== undefined && (obj.options = message.options ? exports.OneofOptions.toJSON(message.options) : undefined); + return obj; + }, +}; +function createBaseEnumDescriptorProto() { + return { name: "", value: [], options: undefined, reservedRange: [], reservedName: [] }; +} +exports.EnumDescriptorProto = { + fromJSON(object) { + return { + name: isSet(object.name) ? String(object.name) : "", + value: Array.isArray(object?.value) ? object.value.map((e) => exports.EnumValueDescriptorProto.fromJSON(e)) : [], + options: isSet(object.options) ? exports.EnumOptions.fromJSON(object.options) : undefined, + reservedRange: Array.isArray(object?.reservedRange) + ? object.reservedRange.map((e) => exports.EnumDescriptorProto_EnumReservedRange.fromJSON(e)) + : [], + reservedName: Array.isArray(object?.reservedName) + ? 
object.reservedName.map((e) => String(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + message.name !== undefined && (obj.name = message.name); + if (message.value) { + obj.value = message.value.map((e) => e ? exports.EnumValueDescriptorProto.toJSON(e) : undefined); + } + else { + obj.value = []; + } + message.options !== undefined && (obj.options = message.options ? exports.EnumOptions.toJSON(message.options) : undefined); + if (message.reservedRange) { + obj.reservedRange = message.reservedRange.map((e) => e ? exports.EnumDescriptorProto_EnumReservedRange.toJSON(e) : undefined); + } + else { + obj.reservedRange = []; + } + if (message.reservedName) { + obj.reservedName = message.reservedName.map((e) => e); + } + else { + obj.reservedName = []; + } + return obj; + }, +}; +function createBaseEnumDescriptorProto_EnumReservedRange() { + return { start: 0, end: 0 }; +} +exports.EnumDescriptorProto_EnumReservedRange = { + fromJSON(object) { + return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? Number(object.end) : 0 }; + }, + toJSON(message) { + const obj = {}; + message.start !== undefined && (obj.start = Math.round(message.start)); + message.end !== undefined && (obj.end = Math.round(message.end)); + return obj; + }, +}; +function createBaseEnumValueDescriptorProto() { + return { name: "", number: 0, options: undefined }; +} +exports.EnumValueDescriptorProto = { + fromJSON(object) { + return { + name: isSet(object.name) ? String(object.name) : "", + number: isSet(object.number) ? Number(object.number) : 0, + options: isSet(object.options) ? exports.EnumValueOptions.fromJSON(object.options) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.name !== undefined && (obj.name = message.name); + message.number !== undefined && (obj.number = Math.round(message.number)); + message.options !== undefined && + (obj.options = message.options ? exports.EnumValueOptions.toJSON(message.options) : undefined); + return obj; + }, +}; +function createBaseServiceDescriptorProto() { + return { name: "", method: [], options: undefined }; +} +exports.ServiceDescriptorProto = { + fromJSON(object) { + return { + name: isSet(object.name) ? String(object.name) : "", + method: Array.isArray(object?.method) ? object.method.map((e) => exports.MethodDescriptorProto.fromJSON(e)) : [], + options: isSet(object.options) ? exports.ServiceOptions.fromJSON(object.options) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.name !== undefined && (obj.name = message.name); + if (message.method) { + obj.method = message.method.map((e) => e ? exports.MethodDescriptorProto.toJSON(e) : undefined); + } + else { + obj.method = []; + } + message.options !== undefined && + (obj.options = message.options ? exports.ServiceOptions.toJSON(message.options) : undefined); + return obj; + }, +}; +function createBaseMethodDescriptorProto() { + return { + name: "", + inputType: "", + outputType: "", + options: undefined, + clientStreaming: false, + serverStreaming: false, + }; +} +exports.MethodDescriptorProto = { + fromJSON(object) { + return { + name: isSet(object.name) ? String(object.name) : "", + inputType: isSet(object.inputType) ? String(object.inputType) : "", + outputType: isSet(object.outputType) ? String(object.outputType) : "", + options: isSet(object.options) ? exports.MethodOptions.fromJSON(object.options) : undefined, + clientStreaming: isSet(object.clientStreaming) ? 
Boolean(object.clientStreaming) : false, + serverStreaming: isSet(object.serverStreaming) ? Boolean(object.serverStreaming) : false, + }; + }, + toJSON(message) { + const obj = {}; + message.name !== undefined && (obj.name = message.name); + message.inputType !== undefined && (obj.inputType = message.inputType); + message.outputType !== undefined && (obj.outputType = message.outputType); + message.options !== undefined && + (obj.options = message.options ? exports.MethodOptions.toJSON(message.options) : undefined); + message.clientStreaming !== undefined && (obj.clientStreaming = message.clientStreaming); + message.serverStreaming !== undefined && (obj.serverStreaming = message.serverStreaming); + return obj; + }, +}; +function createBaseFileOptions() { + return { + javaPackage: "", + javaOuterClassname: "", + javaMultipleFiles: false, + javaGenerateEqualsAndHash: false, + javaStringCheckUtf8: false, + optimizeFor: 1, + goPackage: "", + ccGenericServices: false, + javaGenericServices: false, + pyGenericServices: false, + phpGenericServices: false, + deprecated: false, + ccEnableArenas: false, + objcClassPrefix: "", + csharpNamespace: "", + swiftPrefix: "", + phpClassPrefix: "", + phpNamespace: "", + phpMetadataNamespace: "", + rubyPackage: "", + uninterpretedOption: [], + }; +} +exports.FileOptions = { + fromJSON(object) { + return { + javaPackage: isSet(object.javaPackage) ? String(object.javaPackage) : "", + javaOuterClassname: isSet(object.javaOuterClassname) ? String(object.javaOuterClassname) : "", + javaMultipleFiles: isSet(object.javaMultipleFiles) ? Boolean(object.javaMultipleFiles) : false, + javaGenerateEqualsAndHash: isSet(object.javaGenerateEqualsAndHash) + ? Boolean(object.javaGenerateEqualsAndHash) + : false, + javaStringCheckUtf8: isSet(object.javaStringCheckUtf8) ? Boolean(object.javaStringCheckUtf8) : false, + optimizeFor: isSet(object.optimizeFor) ? fileOptions_OptimizeModeFromJSON(object.optimizeFor) : 1, + goPackage: isSet(object.goPackage) ? String(object.goPackage) : "", + ccGenericServices: isSet(object.ccGenericServices) ? Boolean(object.ccGenericServices) : false, + javaGenericServices: isSet(object.javaGenericServices) ? Boolean(object.javaGenericServices) : false, + pyGenericServices: isSet(object.pyGenericServices) ? Boolean(object.pyGenericServices) : false, + phpGenericServices: isSet(object.phpGenericServices) ? Boolean(object.phpGenericServices) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + ccEnableArenas: isSet(object.ccEnableArenas) ? Boolean(object.ccEnableArenas) : false, + objcClassPrefix: isSet(object.objcClassPrefix) ? String(object.objcClassPrefix) : "", + csharpNamespace: isSet(object.csharpNamespace) ? String(object.csharpNamespace) : "", + swiftPrefix: isSet(object.swiftPrefix) ? String(object.swiftPrefix) : "", + phpClassPrefix: isSet(object.phpClassPrefix) ? String(object.phpClassPrefix) : "", + phpNamespace: isSet(object.phpNamespace) ? String(object.phpNamespace) : "", + phpMetadataNamespace: isSet(object.phpMetadataNamespace) ? String(object.phpMetadataNamespace) : "", + rubyPackage: isSet(object.rubyPackage) ? String(object.rubyPackage) : "", + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + message.javaPackage !== undefined && (obj.javaPackage = message.javaPackage); + message.javaOuterClassname !== undefined && (obj.javaOuterClassname = message.javaOuterClassname); + message.javaMultipleFiles !== undefined && (obj.javaMultipleFiles = message.javaMultipleFiles); + message.javaGenerateEqualsAndHash !== undefined && + (obj.javaGenerateEqualsAndHash = message.javaGenerateEqualsAndHash); + message.javaStringCheckUtf8 !== undefined && (obj.javaStringCheckUtf8 = message.javaStringCheckUtf8); + message.optimizeFor !== undefined && (obj.optimizeFor = fileOptions_OptimizeModeToJSON(message.optimizeFor)); + message.goPackage !== undefined && (obj.goPackage = message.goPackage); + message.ccGenericServices !== undefined && (obj.ccGenericServices = message.ccGenericServices); + message.javaGenericServices !== undefined && (obj.javaGenericServices = message.javaGenericServices); + message.pyGenericServices !== undefined && (obj.pyGenericServices = message.pyGenericServices); + message.phpGenericServices !== undefined && (obj.phpGenericServices = message.phpGenericServices); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.ccEnableArenas !== undefined && (obj.ccEnableArenas = message.ccEnableArenas); + message.objcClassPrefix !== undefined && (obj.objcClassPrefix = message.objcClassPrefix); + message.csharpNamespace !== undefined && (obj.csharpNamespace = message.csharpNamespace); + message.swiftPrefix !== undefined && (obj.swiftPrefix = message.swiftPrefix); + message.phpClassPrefix !== undefined && (obj.phpClassPrefix = message.phpClassPrefix); + message.phpNamespace !== undefined && (obj.phpNamespace = message.phpNamespace); + message.phpMetadataNamespace !== undefined && (obj.phpMetadataNamespace = message.phpMetadataNamespace); + message.rubyPackage !== undefined && (obj.rubyPackage = message.rubyPackage); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined); + } + else { + obj.uninterpretedOption = []; + } + return obj; + }, +}; +function createBaseMessageOptions() { + return { + messageSetWireFormat: false, + noStandardDescriptorAccessor: false, + deprecated: false, + mapEntry: false, + uninterpretedOption: [], + }; +} +exports.MessageOptions = { + fromJSON(object) { + return { + messageSetWireFormat: isSet(object.messageSetWireFormat) ? Boolean(object.messageSetWireFormat) : false, + noStandardDescriptorAccessor: isSet(object.noStandardDescriptorAccessor) + ? Boolean(object.noStandardDescriptorAccessor) + : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + mapEntry: isSet(object.mapEntry) ? Boolean(object.mapEntry) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + message.messageSetWireFormat !== undefined && (obj.messageSetWireFormat = message.messageSetWireFormat); + message.noStandardDescriptorAccessor !== undefined && + (obj.noStandardDescriptorAccessor = message.noStandardDescriptorAccessor); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.mapEntry !== undefined && (obj.mapEntry = message.mapEntry); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined); + } + else { + obj.uninterpretedOption = []; + } + return obj; + }, +}; +function createBaseFieldOptions() { + return { + ctype: 0, + packed: false, + jstype: 0, + lazy: false, + unverifiedLazy: false, + deprecated: false, + weak: false, + uninterpretedOption: [], + }; +} +exports.FieldOptions = { + fromJSON(object) { + return { + ctype: isSet(object.ctype) ? fieldOptions_CTypeFromJSON(object.ctype) : 0, + packed: isSet(object.packed) ? Boolean(object.packed) : false, + jstype: isSet(object.jstype) ? fieldOptions_JSTypeFromJSON(object.jstype) : 0, + lazy: isSet(object.lazy) ? Boolean(object.lazy) : false, + unverifiedLazy: isSet(object.unverifiedLazy) ? Boolean(object.unverifiedLazy) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + weak: isSet(object.weak) ? Boolean(object.weak) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + message.ctype !== undefined && (obj.ctype = fieldOptions_CTypeToJSON(message.ctype)); + message.packed !== undefined && (obj.packed = message.packed); + message.jstype !== undefined && (obj.jstype = fieldOptions_JSTypeToJSON(message.jstype)); + message.lazy !== undefined && (obj.lazy = message.lazy); + message.unverifiedLazy !== undefined && (obj.unverifiedLazy = message.unverifiedLazy); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.weak !== undefined && (obj.weak = message.weak); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined); + } + else { + obj.uninterpretedOption = []; + } + return obj; + }, +}; +function createBaseOneofOptions() { + return { uninterpretedOption: [] }; +} +exports.OneofOptions = { + fromJSON(object) { + return { + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined); + } + else { + obj.uninterpretedOption = []; + } + return obj; + }, +}; +function createBaseEnumOptions() { + return { allowAlias: false, deprecated: false, uninterpretedOption: [] }; +} +exports.EnumOptions = { + fromJSON(object) { + return { + allowAlias: isSet(object.allowAlias) ? Boolean(object.allowAlias) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + message.allowAlias !== undefined && (obj.allowAlias = message.allowAlias); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined); + } + else { + obj.uninterpretedOption = []; + } + return obj; + }, +}; +function createBaseEnumValueOptions() { + return { deprecated: false, uninterpretedOption: [] }; +} +exports.EnumValueOptions = { + fromJSON(object) { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined); + } + else { + obj.uninterpretedOption = []; + } + return obj; + }, +}; +function createBaseServiceOptions() { + return { deprecated: false, uninterpretedOption: [] }; +} +exports.ServiceOptions = { + fromJSON(object) { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined); + } + else { + obj.uninterpretedOption = []; + } + return obj; + }, +}; +function createBaseMethodOptions() { + return { deprecated: false, idempotencyLevel: 0, uninterpretedOption: [] }; +} +exports.MethodOptions = { + fromJSON(object) { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + idempotencyLevel: isSet(object.idempotencyLevel) + ? methodOptions_IdempotencyLevelFromJSON(object.idempotencyLevel) + : 0, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.idempotencyLevel !== undefined && + (obj.idempotencyLevel = methodOptions_IdempotencyLevelToJSON(message.idempotencyLevel)); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined); + } + else { + obj.uninterpretedOption = []; + } + return obj; + }, +}; +function createBaseUninterpretedOption() { + return { + name: [], + identifierValue: "", + positiveIntValue: "0", + negativeIntValue: "0", + doubleValue: 0, + stringValue: Buffer.alloc(0), + aggregateValue: "", + }; +} +exports.UninterpretedOption = { + fromJSON(object) { + return { + name: Array.isArray(object?.name) ? object.name.map((e) => exports.UninterpretedOption_NamePart.fromJSON(e)) : [], + identifierValue: isSet(object.identifierValue) ? 
String(object.identifierValue) : "", + positiveIntValue: isSet(object.positiveIntValue) ? String(object.positiveIntValue) : "0", + negativeIntValue: isSet(object.negativeIntValue) ? String(object.negativeIntValue) : "0", + doubleValue: isSet(object.doubleValue) ? Number(object.doubleValue) : 0, + stringValue: isSet(object.stringValue) ? Buffer.from(bytesFromBase64(object.stringValue)) : Buffer.alloc(0), + aggregateValue: isSet(object.aggregateValue) ? String(object.aggregateValue) : "", + }; + }, + toJSON(message) { + const obj = {}; + if (message.name) { + obj.name = message.name.map((e) => e ? exports.UninterpretedOption_NamePart.toJSON(e) : undefined); + } + else { + obj.name = []; + } + message.identifierValue !== undefined && (obj.identifierValue = message.identifierValue); + message.positiveIntValue !== undefined && (obj.positiveIntValue = message.positiveIntValue); + message.negativeIntValue !== undefined && (obj.negativeIntValue = message.negativeIntValue); + message.doubleValue !== undefined && (obj.doubleValue = message.doubleValue); + message.stringValue !== undefined && + (obj.stringValue = base64FromBytes(message.stringValue !== undefined ? message.stringValue : Buffer.alloc(0))); + message.aggregateValue !== undefined && (obj.aggregateValue = message.aggregateValue); + return obj; + }, +}; +function createBaseUninterpretedOption_NamePart() { + return { namePart: "", isExtension: false }; +} +exports.UninterpretedOption_NamePart = { + fromJSON(object) { + return { + namePart: isSet(object.namePart) ? String(object.namePart) : "", + isExtension: isSet(object.isExtension) ? Boolean(object.isExtension) : false, + }; + }, + toJSON(message) { + const obj = {}; + message.namePart !== undefined && (obj.namePart = message.namePart); + message.isExtension !== undefined && (obj.isExtension = message.isExtension); + return obj; + }, +}; +function createBaseSourceCodeInfo() { + return { location: [] }; +} +exports.SourceCodeInfo = { + fromJSON(object) { + return { + location: Array.isArray(object?.location) + ? object.location.map((e) => exports.SourceCodeInfo_Location.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.location) { + obj.location = message.location.map((e) => e ? exports.SourceCodeInfo_Location.toJSON(e) : undefined); + } + else { + obj.location = []; + } + return obj; + }, +}; +function createBaseSourceCodeInfo_Location() { + return { path: [], span: [], leadingComments: "", trailingComments: "", leadingDetachedComments: [] }; +} +exports.SourceCodeInfo_Location = { + fromJSON(object) { + return { + path: Array.isArray(object?.path) ? object.path.map((e) => Number(e)) : [], + span: Array.isArray(object?.span) ? object.span.map((e) => Number(e)) : [], + leadingComments: isSet(object.leadingComments) ? String(object.leadingComments) : "", + trailingComments: isSet(object.trailingComments) ? String(object.trailingComments) : "", + leadingDetachedComments: Array.isArray(object?.leadingDetachedComments) + ? 
object.leadingDetachedComments.map((e) => String(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.path) { + obj.path = message.path.map((e) => Math.round(e)); + } + else { + obj.path = []; + } + if (message.span) { + obj.span = message.span.map((e) => Math.round(e)); + } + else { + obj.span = []; + } + message.leadingComments !== undefined && (obj.leadingComments = message.leadingComments); + message.trailingComments !== undefined && (obj.trailingComments = message.trailingComments); + if (message.leadingDetachedComments) { + obj.leadingDetachedComments = message.leadingDetachedComments.map((e) => e); + } + else { + obj.leadingDetachedComments = []; + } + return obj; + }, +}; +function createBaseGeneratedCodeInfo() { + return { annotation: [] }; +} +exports.GeneratedCodeInfo = { + fromJSON(object) { + return { + annotation: Array.isArray(object?.annotation) + ? object.annotation.map((e) => exports.GeneratedCodeInfo_Annotation.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.annotation) { + obj.annotation = message.annotation.map((e) => e ? exports.GeneratedCodeInfo_Annotation.toJSON(e) : undefined); + } + else { + obj.annotation = []; + } + return obj; + }, +}; +function createBaseGeneratedCodeInfo_Annotation() { + return { path: [], sourceFile: "", begin: 0, end: 0 }; +} +exports.GeneratedCodeInfo_Annotation = { + fromJSON(object) { + return { + path: Array.isArray(object?.path) ? object.path.map((e) => Number(e)) : [], + sourceFile: isSet(object.sourceFile) ? String(object.sourceFile) : "", + begin: isSet(object.begin) ? Number(object.begin) : 0, + end: isSet(object.end) ? Number(object.end) : 0, + }; + }, + toJSON(message) { + const obj = {}; + if (message.path) { + obj.path = message.path.map((e) => Math.round(e)); + } + else { + obj.path = []; + } + message.sourceFile !== undefined && (obj.sourceFile = message.sourceFile); + message.begin !== undefined && (obj.begin = Math.round(message.begin)); + message.end !== undefined && (obj.end = Math.round(message.end)); + return obj; + }, +}; +var tsProtoGlobalThis = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); +function bytesFromBase64(b64) { + if (tsProtoGlobalThis.Buffer) { + return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64")); + } + else { + const bin = tsProtoGlobalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} +function base64FromBytes(arr) { + if (tsProtoGlobalThis.Buffer) { + return tsProtoGlobalThis.Buffer.from(arr).toString("base64"); + } + else { + const bin = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return tsProtoGlobalThis.btoa(bin.join("")); + } +} +function isSet(value) { + return value !== null && value !== undefined; +} diff --git a/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js new file mode 100644 index 0000000000000..159135fe87172 --- /dev/null +++ b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js @@ -0,0 +1,24 @@ 
+"use strict"; +/* eslint-disable */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Timestamp = void 0; +function createBaseTimestamp() { + return { seconds: "0", nanos: 0 }; +} +exports.Timestamp = { + fromJSON(object) { + return { + seconds: isSet(object.seconds) ? String(object.seconds) : "0", + nanos: isSet(object.nanos) ? Number(object.nanos) : 0, + }; + }, + toJSON(message) { + const obj = {}; + message.seconds !== undefined && (obj.seconds = message.seconds); + message.nanos !== undefined && (obj.nanos = Math.round(message.nanos)); + return obj; + }, +}; +function isSet(value) { + return value !== null && value !== undefined; +} diff --git a/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js new file mode 100644 index 0000000000000..1ef3e1b3356b7 --- /dev/null +++ b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js @@ -0,0 +1,106 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Bundle = exports.VerificationMaterial = exports.TimestampVerificationData = void 0; +/* eslint-disable */ +const envelope_1 = require("./envelope"); +const sigstore_common_1 = require("./sigstore_common"); +const sigstore_rekor_1 = require("./sigstore_rekor"); +function createBaseTimestampVerificationData() { + return { rfc3161Timestamps: [] }; +} +exports.TimestampVerificationData = { + fromJSON(object) { + return { + rfc3161Timestamps: Array.isArray(object?.rfc3161Timestamps) + ? object.rfc3161Timestamps.map((e) => sigstore_common_1.RFC3161SignedTimestamp.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.rfc3161Timestamps) { + obj.rfc3161Timestamps = message.rfc3161Timestamps.map((e) => e ? sigstore_common_1.RFC3161SignedTimestamp.toJSON(e) : undefined); + } + else { + obj.rfc3161Timestamps = []; + } + return obj; + }, +}; +function createBaseVerificationMaterial() { + return { content: undefined, tlogEntries: [], timestampVerificationData: undefined }; +} +exports.VerificationMaterial = { + fromJSON(object) { + return { + content: isSet(object.publicKey) + ? { $case: "publicKey", publicKey: sigstore_common_1.PublicKeyIdentifier.fromJSON(object.publicKey) } + : isSet(object.x509CertificateChain) + ? { + $case: "x509CertificateChain", + x509CertificateChain: sigstore_common_1.X509CertificateChain.fromJSON(object.x509CertificateChain), + } + : undefined, + tlogEntries: Array.isArray(object?.tlogEntries) + ? object.tlogEntries.map((e) => sigstore_rekor_1.TransparencyLogEntry.fromJSON(e)) + : [], + timestampVerificationData: isSet(object.timestampVerificationData) + ? exports.TimestampVerificationData.fromJSON(object.timestampVerificationData) + : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.content?.$case === "publicKey" && + (obj.publicKey = message.content?.publicKey ? sigstore_common_1.PublicKeyIdentifier.toJSON(message.content?.publicKey) : undefined); + message.content?.$case === "x509CertificateChain" && + (obj.x509CertificateChain = message.content?.x509CertificateChain + ? sigstore_common_1.X509CertificateChain.toJSON(message.content?.x509CertificateChain) + : undefined); + if (message.tlogEntries) { + obj.tlogEntries = message.tlogEntries.map((e) => e ? 
sigstore_rekor_1.TransparencyLogEntry.toJSON(e) : undefined);
+        }
+        else {
+            obj.tlogEntries = [];
+        }
+        message.timestampVerificationData !== undefined &&
+            (obj.timestampVerificationData = message.timestampVerificationData
+                ? exports.TimestampVerificationData.toJSON(message.timestampVerificationData)
+                : undefined);
+        return obj;
+    },
+};
+function createBaseBundle() {
+    return { mediaType: "", verificationMaterial: undefined, content: undefined };
+}
+exports.Bundle = {
+    fromJSON(object) {
+        return {
+            mediaType: isSet(object.mediaType) ? String(object.mediaType) : "",
+            verificationMaterial: isSet(object.verificationMaterial)
+                ? exports.VerificationMaterial.fromJSON(object.verificationMaterial)
+                : undefined,
+            content: isSet(object.messageSignature)
+                ? { $case: "messageSignature", messageSignature: sigstore_common_1.MessageSignature.fromJSON(object.messageSignature) }
+                : isSet(object.dsseEnvelope)
+                    ? { $case: "dsseEnvelope", dsseEnvelope: envelope_1.Envelope.fromJSON(object.dsseEnvelope) }
+                    : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.mediaType !== undefined && (obj.mediaType = message.mediaType);
+        message.verificationMaterial !== undefined && (obj.verificationMaterial = message.verificationMaterial
+            ? exports.VerificationMaterial.toJSON(message.verificationMaterial)
+            : undefined);
+        message.content?.$case === "messageSignature" && (obj.messageSignature = message.content?.messageSignature
+            ? sigstore_common_1.MessageSignature.toJSON(message.content?.messageSignature)
+            : undefined);
+        message.content?.$case === "dsseEnvelope" &&
+            (obj.dsseEnvelope = message.content?.dsseEnvelope ? envelope_1.Envelope.toJSON(message.content?.dsseEnvelope) : undefined);
+        return obj;
+    },
+};
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
new file mode 100644
index 0000000000000..bcd654e9154b9
--- /dev/null
+++ b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
@@ -0,0 +1,457 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TimeRange = exports.X509CertificateChain = exports.SubjectAlternativeName = exports.X509Certificate = exports.DistinguishedName = exports.ObjectIdentifierValuePair = exports.ObjectIdentifier = exports.PublicKeyIdentifier = exports.PublicKey = exports.RFC3161SignedTimestamp = exports.LogId = exports.MessageSignature = exports.HashOutput = exports.subjectAlternativeNameTypeToJSON = exports.subjectAlternativeNameTypeFromJSON = exports.SubjectAlternativeNameType = exports.publicKeyDetailsToJSON = exports.publicKeyDetailsFromJSON = exports.PublicKeyDetails = exports.hashAlgorithmToJSON = exports.hashAlgorithmFromJSON = exports.HashAlgorithm = void 0;
+/* eslint-disable */
+const timestamp_1 = require("./google/protobuf/timestamp");
+/**
+ * Only a subset of the secure hash standard algorithms are supported.
+ * See <https://csrc.nist.gov/projects/hash-functions#approved-algorithms> for more
+ * details.
+ * UNSPECIFIED SHOULD not be used, primary reason for inclusion is to force
+ * any proto JSON serialization to emit the used hash algorithm, as default
+ * option is to *omit* the default value of an enum (which is the first
+ * value, represented by '0'.
+ */ +var HashAlgorithm; +(function (HashAlgorithm) { + HashAlgorithm[HashAlgorithm["HASH_ALGORITHM_UNSPECIFIED"] = 0] = "HASH_ALGORITHM_UNSPECIFIED"; + HashAlgorithm[HashAlgorithm["SHA2_256"] = 1] = "SHA2_256"; +})(HashAlgorithm = exports.HashAlgorithm || (exports.HashAlgorithm = {})); +function hashAlgorithmFromJSON(object) { + switch (object) { + case 0: + case "HASH_ALGORITHM_UNSPECIFIED": + return HashAlgorithm.HASH_ALGORITHM_UNSPECIFIED; + case 1: + case "SHA2_256": + return HashAlgorithm.SHA2_256; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm"); + } +} +exports.hashAlgorithmFromJSON = hashAlgorithmFromJSON; +function hashAlgorithmToJSON(object) { + switch (object) { + case HashAlgorithm.HASH_ALGORITHM_UNSPECIFIED: + return "HASH_ALGORITHM_UNSPECIFIED"; + case HashAlgorithm.SHA2_256: + return "SHA2_256"; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm"); + } +} +exports.hashAlgorithmToJSON = hashAlgorithmToJSON; +/** + * Details of a specific public key, capturing the the key encoding method, + * and signature algorithm. + * To avoid the possibility of contradicting formats such as PKCS1 with + * ED25519 the valid permutations are listed as a linear set instead of a + * cartesian set (i.e one combined variable instead of two, one for encoding + * and one for the signature algorithm). + */ +var PublicKeyDetails; +(function (PublicKeyDetails) { + PublicKeyDetails[PublicKeyDetails["PUBLIC_KEY_DETAILS_UNSPECIFIED"] = 0] = "PUBLIC_KEY_DETAILS_UNSPECIFIED"; + /** PKCS1_RSA_PKCS1V5 - RSA */ + PublicKeyDetails[PublicKeyDetails["PKCS1_RSA_PKCS1V5"] = 1] = "PKCS1_RSA_PKCS1V5"; + /** PKCS1_RSA_PSS - See RFC8017 */ + PublicKeyDetails[PublicKeyDetails["PKCS1_RSA_PSS"] = 2] = "PKCS1_RSA_PSS"; + PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PKCS1V5"] = 3] = "PKIX_RSA_PKCS1V5"; + PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PSS"] = 4] = "PKIX_RSA_PSS"; + /** PKIX_ECDSA_P256_SHA_256 - ECDSA */ + PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P256_SHA_256"] = 5] = "PKIX_ECDSA_P256_SHA_256"; + /** PKIX_ECDSA_P256_HMAC_SHA_256 - See RFC6979 */ + PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P256_HMAC_SHA_256"] = 6] = "PKIX_ECDSA_P256_HMAC_SHA_256"; + /** PKIX_ED25519 - Ed 25519 */ + PublicKeyDetails[PublicKeyDetails["PKIX_ED25519"] = 7] = "PKIX_ED25519"; +})(PublicKeyDetails = exports.PublicKeyDetails || (exports.PublicKeyDetails = {})); +function publicKeyDetailsFromJSON(object) { + switch (object) { + case 0: + case "PUBLIC_KEY_DETAILS_UNSPECIFIED": + return PublicKeyDetails.PUBLIC_KEY_DETAILS_UNSPECIFIED; + case 1: + case "PKCS1_RSA_PKCS1V5": + return PublicKeyDetails.PKCS1_RSA_PKCS1V5; + case 2: + case "PKCS1_RSA_PSS": + return PublicKeyDetails.PKCS1_RSA_PSS; + case 3: + case "PKIX_RSA_PKCS1V5": + return PublicKeyDetails.PKIX_RSA_PKCS1V5; + case 4: + case "PKIX_RSA_PSS": + return PublicKeyDetails.PKIX_RSA_PSS; + case 5: + case "PKIX_ECDSA_P256_SHA_256": + return PublicKeyDetails.PKIX_ECDSA_P256_SHA_256; + case 6: + case "PKIX_ECDSA_P256_HMAC_SHA_256": + return PublicKeyDetails.PKIX_ECDSA_P256_HMAC_SHA_256; + case 7: + case "PKIX_ED25519": + return PublicKeyDetails.PKIX_ED25519; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails"); + } +} +exports.publicKeyDetailsFromJSON = publicKeyDetailsFromJSON; +function publicKeyDetailsToJSON(object) { + switch (object) { + case PublicKeyDetails.PUBLIC_KEY_DETAILS_UNSPECIFIED: + 
return "PUBLIC_KEY_DETAILS_UNSPECIFIED"; + case PublicKeyDetails.PKCS1_RSA_PKCS1V5: + return "PKCS1_RSA_PKCS1V5"; + case PublicKeyDetails.PKCS1_RSA_PSS: + return "PKCS1_RSA_PSS"; + case PublicKeyDetails.PKIX_RSA_PKCS1V5: + return "PKIX_RSA_PKCS1V5"; + case PublicKeyDetails.PKIX_RSA_PSS: + return "PKIX_RSA_PSS"; + case PublicKeyDetails.PKIX_ECDSA_P256_SHA_256: + return "PKIX_ECDSA_P256_SHA_256"; + case PublicKeyDetails.PKIX_ECDSA_P256_HMAC_SHA_256: + return "PKIX_ECDSA_P256_HMAC_SHA_256"; + case PublicKeyDetails.PKIX_ED25519: + return "PKIX_ED25519"; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails"); + } +} +exports.publicKeyDetailsToJSON = publicKeyDetailsToJSON; +var SubjectAlternativeNameType; +(function (SubjectAlternativeNameType) { + SubjectAlternativeNameType[SubjectAlternativeNameType["SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED"] = 0] = "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED"; + SubjectAlternativeNameType[SubjectAlternativeNameType["EMAIL"] = 1] = "EMAIL"; + SubjectAlternativeNameType[SubjectAlternativeNameType["URI"] = 2] = "URI"; + /** + * OTHER_NAME - OID 1.3.6.1.4.1.57264.1.7 + * See https://github.com/sigstore/fulcio/blob/main/docs/oid-info.md#1361415726417--othername-san + * for more details. + */ + SubjectAlternativeNameType[SubjectAlternativeNameType["OTHER_NAME"] = 3] = "OTHER_NAME"; +})(SubjectAlternativeNameType = exports.SubjectAlternativeNameType || (exports.SubjectAlternativeNameType = {})); +function subjectAlternativeNameTypeFromJSON(object) { + switch (object) { + case 0: + case "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED": + return SubjectAlternativeNameType.SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED; + case 1: + case "EMAIL": + return SubjectAlternativeNameType.EMAIL; + case 2: + case "URI": + return SubjectAlternativeNameType.URI; + case 3: + case "OTHER_NAME": + return SubjectAlternativeNameType.OTHER_NAME; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType"); + } +} +exports.subjectAlternativeNameTypeFromJSON = subjectAlternativeNameTypeFromJSON; +function subjectAlternativeNameTypeToJSON(object) { + switch (object) { + case SubjectAlternativeNameType.SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED: + return "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED"; + case SubjectAlternativeNameType.EMAIL: + return "EMAIL"; + case SubjectAlternativeNameType.URI: + return "URI"; + case SubjectAlternativeNameType.OTHER_NAME: + return "OTHER_NAME"; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType"); + } +} +exports.subjectAlternativeNameTypeToJSON = subjectAlternativeNameTypeToJSON; +function createBaseHashOutput() { + return { algorithm: 0, digest: Buffer.alloc(0) }; +} +exports.HashOutput = { + fromJSON(object) { + return { + algorithm: isSet(object.algorithm) ? hashAlgorithmFromJSON(object.algorithm) : 0, + digest: isSet(object.digest) ? Buffer.from(bytesFromBase64(object.digest)) : Buffer.alloc(0), + }; + }, + toJSON(message) { + const obj = {}; + message.algorithm !== undefined && (obj.algorithm = hashAlgorithmToJSON(message.algorithm)); + message.digest !== undefined && + (obj.digest = base64FromBytes(message.digest !== undefined ? 
message.digest : Buffer.alloc(0))); + return obj; + }, +}; +function createBaseMessageSignature() { + return { messageDigest: undefined, signature: Buffer.alloc(0) }; +} +exports.MessageSignature = { + fromJSON(object) { + return { + messageDigest: isSet(object.messageDigest) ? exports.HashOutput.fromJSON(object.messageDigest) : undefined, + signature: isSet(object.signature) ? Buffer.from(bytesFromBase64(object.signature)) : Buffer.alloc(0), + }; + }, + toJSON(message) { + const obj = {}; + message.messageDigest !== undefined && + (obj.messageDigest = message.messageDigest ? exports.HashOutput.toJSON(message.messageDigest) : undefined); + message.signature !== undefined && + (obj.signature = base64FromBytes(message.signature !== undefined ? message.signature : Buffer.alloc(0))); + return obj; + }, +}; +function createBaseLogId() { + return { keyId: Buffer.alloc(0) }; +} +exports.LogId = { + fromJSON(object) { + return { keyId: isSet(object.keyId) ? Buffer.from(bytesFromBase64(object.keyId)) : Buffer.alloc(0) }; + }, + toJSON(message) { + const obj = {}; + message.keyId !== undefined && + (obj.keyId = base64FromBytes(message.keyId !== undefined ? message.keyId : Buffer.alloc(0))); + return obj; + }, +}; +function createBaseRFC3161SignedTimestamp() { + return { signedTimestamp: Buffer.alloc(0) }; +} +exports.RFC3161SignedTimestamp = { + fromJSON(object) { + return { + signedTimestamp: isSet(object.signedTimestamp) + ? Buffer.from(bytesFromBase64(object.signedTimestamp)) + : Buffer.alloc(0), + }; + }, + toJSON(message) { + const obj = {}; + message.signedTimestamp !== undefined && + (obj.signedTimestamp = base64FromBytes(message.signedTimestamp !== undefined ? message.signedTimestamp : Buffer.alloc(0))); + return obj; + }, +}; +function createBasePublicKey() { + return { rawBytes: undefined, keyDetails: 0, validFor: undefined }; +} +exports.PublicKey = { + fromJSON(object) { + return { + rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : undefined, + keyDetails: isSet(object.keyDetails) ? publicKeyDetailsFromJSON(object.keyDetails) : 0, + validFor: isSet(object.validFor) ? exports.TimeRange.fromJSON(object.validFor) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.rawBytes !== undefined && + (obj.rawBytes = message.rawBytes !== undefined ? base64FromBytes(message.rawBytes) : undefined); + message.keyDetails !== undefined && (obj.keyDetails = publicKeyDetailsToJSON(message.keyDetails)); + message.validFor !== undefined && + (obj.validFor = message.validFor ? exports.TimeRange.toJSON(message.validFor) : undefined); + return obj; + }, +}; +function createBasePublicKeyIdentifier() { + return { hint: "" }; +} +exports.PublicKeyIdentifier = { + fromJSON(object) { + return { hint: isSet(object.hint) ? String(object.hint) : "" }; + }, + toJSON(message) { + const obj = {}; + message.hint !== undefined && (obj.hint = message.hint); + return obj; + }, +}; +function createBaseObjectIdentifier() { + return { id: [] }; +} +exports.ObjectIdentifier = { + fromJSON(object) { + return { id: Array.isArray(object?.id) ? object.id.map((e) => Number(e)) : [] }; + }, + toJSON(message) { + const obj = {}; + if (message.id) { + obj.id = message.id.map((e) => Math.round(e)); + } + else { + obj.id = []; + } + return obj; + }, +}; +function createBaseObjectIdentifierValuePair() { + return { oid: undefined, value: Buffer.alloc(0) }; +} +exports.ObjectIdentifierValuePair = { + fromJSON(object) { + return { + oid: isSet(object.oid) ? 
exports.ObjectIdentifier.fromJSON(object.oid) : undefined, + value: isSet(object.value) ? Buffer.from(bytesFromBase64(object.value)) : Buffer.alloc(0), + }; + }, + toJSON(message) { + const obj = {}; + message.oid !== undefined && (obj.oid = message.oid ? exports.ObjectIdentifier.toJSON(message.oid) : undefined); + message.value !== undefined && + (obj.value = base64FromBytes(message.value !== undefined ? message.value : Buffer.alloc(0))); + return obj; + }, +}; +function createBaseDistinguishedName() { + return { organization: "", commonName: "" }; +} +exports.DistinguishedName = { + fromJSON(object) { + return { + organization: isSet(object.organization) ? String(object.organization) : "", + commonName: isSet(object.commonName) ? String(object.commonName) : "", + }; + }, + toJSON(message) { + const obj = {}; + message.organization !== undefined && (obj.organization = message.organization); + message.commonName !== undefined && (obj.commonName = message.commonName); + return obj; + }, +}; +function createBaseX509Certificate() { + return { rawBytes: Buffer.alloc(0) }; +} +exports.X509Certificate = { + fromJSON(object) { + return { rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : Buffer.alloc(0) }; + }, + toJSON(message) { + const obj = {}; + message.rawBytes !== undefined && + (obj.rawBytes = base64FromBytes(message.rawBytes !== undefined ? message.rawBytes : Buffer.alloc(0))); + return obj; + }, +}; +function createBaseSubjectAlternativeName() { + return { type: 0, identity: undefined }; +} +exports.SubjectAlternativeName = { + fromJSON(object) { + return { + type: isSet(object.type) ? subjectAlternativeNameTypeFromJSON(object.type) : 0, + identity: isSet(object.regexp) + ? { $case: "regexp", regexp: String(object.regexp) } + : isSet(object.value) + ? { $case: "value", value: String(object.value) } + : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.type !== undefined && (obj.type = subjectAlternativeNameTypeToJSON(message.type)); + message.identity?.$case === "regexp" && (obj.regexp = message.identity?.regexp); + message.identity?.$case === "value" && (obj.value = message.identity?.value); + return obj; + }, +}; +function createBaseX509CertificateChain() { + return { certificates: [] }; +} +exports.X509CertificateChain = { + fromJSON(object) { + return { + certificates: Array.isArray(object?.certificates) + ? object.certificates.map((e) => exports.X509Certificate.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.certificates) { + obj.certificates = message.certificates.map((e) => e ? exports.X509Certificate.toJSON(e) : undefined); + } + else { + obj.certificates = []; + } + return obj; + }, +}; +function createBaseTimeRange() { + return { start: undefined, end: undefined }; +} +exports.TimeRange = { + fromJSON(object) { + return { + start: isSet(object.start) ? fromJsonTimestamp(object.start) : undefined, + end: isSet(object.end) ? 
fromJsonTimestamp(object.end) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.start !== undefined && (obj.start = message.start.toISOString()); + message.end !== undefined && (obj.end = message.end.toISOString()); + return obj; + }, +}; +var tsProtoGlobalThis = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); +function bytesFromBase64(b64) { + if (tsProtoGlobalThis.Buffer) { + return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64")); + } + else { + const bin = tsProtoGlobalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} +function base64FromBytes(arr) { + if (tsProtoGlobalThis.Buffer) { + return tsProtoGlobalThis.Buffer.from(arr).toString("base64"); + } + else { + const bin = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return tsProtoGlobalThis.btoa(bin.join("")); + } +} +function fromTimestamp(t) { + let millis = Number(t.seconds) * 1000; + millis += t.nanos / 1000000; + return new Date(millis); +} +function fromJsonTimestamp(o) { + if (o instanceof Date) { + return o; + } + else if (typeof o === "string") { + return new Date(o); + } + else { + return fromTimestamp(timestamp_1.Timestamp.fromJSON(o)); + } +} +function isSet(value) { + return value !== null && value !== undefined; +} diff --git a/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js new file mode 100644 index 0000000000000..398193b2075a7 --- /dev/null +++ b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js @@ -0,0 +1,167 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TransparencyLogEntry = exports.InclusionPromise = exports.InclusionProof = exports.Checkpoint = exports.KindVersion = void 0; +/* eslint-disable */ +const sigstore_common_1 = require("./sigstore_common"); +function createBaseKindVersion() { + return { kind: "", version: "" }; +} +exports.KindVersion = { + fromJSON(object) { + return { + kind: isSet(object.kind) ? String(object.kind) : "", + version: isSet(object.version) ? String(object.version) : "", + }; + }, + toJSON(message) { + const obj = {}; + message.kind !== undefined && (obj.kind = message.kind); + message.version !== undefined && (obj.version = message.version); + return obj; + }, +}; +function createBaseCheckpoint() { + return { envelope: "" }; +} +exports.Checkpoint = { + fromJSON(object) { + return { envelope: isSet(object.envelope) ? String(object.envelope) : "" }; + }, + toJSON(message) { + const obj = {}; + message.envelope !== undefined && (obj.envelope = message.envelope); + return obj; + }, +}; +function createBaseInclusionProof() { + return { logIndex: "0", rootHash: Buffer.alloc(0), treeSize: "0", hashes: [], checkpoint: undefined }; +} +exports.InclusionProof = { + fromJSON(object) { + return { + logIndex: isSet(object.logIndex) ? String(object.logIndex) : "0", + rootHash: isSet(object.rootHash) ? Buffer.from(bytesFromBase64(object.rootHash)) : Buffer.alloc(0), + treeSize: isSet(object.treeSize) ? 
String(object.treeSize) : "0", + hashes: Array.isArray(object?.hashes) ? object.hashes.map((e) => Buffer.from(bytesFromBase64(e))) : [], + checkpoint: isSet(object.checkpoint) ? exports.Checkpoint.fromJSON(object.checkpoint) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.logIndex !== undefined && (obj.logIndex = message.logIndex); + message.rootHash !== undefined && + (obj.rootHash = base64FromBytes(message.rootHash !== undefined ? message.rootHash : Buffer.alloc(0))); + message.treeSize !== undefined && (obj.treeSize = message.treeSize); + if (message.hashes) { + obj.hashes = message.hashes.map((e) => base64FromBytes(e !== undefined ? e : Buffer.alloc(0))); + } + else { + obj.hashes = []; + } + message.checkpoint !== undefined && + (obj.checkpoint = message.checkpoint ? exports.Checkpoint.toJSON(message.checkpoint) : undefined); + return obj; + }, +}; +function createBaseInclusionPromise() { + return { signedEntryTimestamp: Buffer.alloc(0) }; +} +exports.InclusionPromise = { + fromJSON(object) { + return { + signedEntryTimestamp: isSet(object.signedEntryTimestamp) + ? Buffer.from(bytesFromBase64(object.signedEntryTimestamp)) + : Buffer.alloc(0), + }; + }, + toJSON(message) { + const obj = {}; + message.signedEntryTimestamp !== undefined && + (obj.signedEntryTimestamp = base64FromBytes(message.signedEntryTimestamp !== undefined ? message.signedEntryTimestamp : Buffer.alloc(0))); + return obj; + }, +}; +function createBaseTransparencyLogEntry() { + return { + logIndex: "0", + logId: undefined, + kindVersion: undefined, + integratedTime: "0", + inclusionPromise: undefined, + inclusionProof: undefined, + canonicalizedBody: Buffer.alloc(0), + }; +} +exports.TransparencyLogEntry = { + fromJSON(object) { + return { + logIndex: isSet(object.logIndex) ? String(object.logIndex) : "0", + logId: isSet(object.logId) ? sigstore_common_1.LogId.fromJSON(object.logId) : undefined, + kindVersion: isSet(object.kindVersion) ? exports.KindVersion.fromJSON(object.kindVersion) : undefined, + integratedTime: isSet(object.integratedTime) ? String(object.integratedTime) : "0", + inclusionPromise: isSet(object.inclusionPromise) ? exports.InclusionPromise.fromJSON(object.inclusionPromise) : undefined, + inclusionProof: isSet(object.inclusionProof) ? exports.InclusionProof.fromJSON(object.inclusionProof) : undefined, + canonicalizedBody: isSet(object.canonicalizedBody) + ? Buffer.from(bytesFromBase64(object.canonicalizedBody)) + : Buffer.alloc(0), + }; + }, + toJSON(message) { + const obj = {}; + message.logIndex !== undefined && (obj.logIndex = message.logIndex); + message.logId !== undefined && (obj.logId = message.logId ? sigstore_common_1.LogId.toJSON(message.logId) : undefined); + message.kindVersion !== undefined && + (obj.kindVersion = message.kindVersion ? exports.KindVersion.toJSON(message.kindVersion) : undefined); + message.integratedTime !== undefined && (obj.integratedTime = message.integratedTime); + message.inclusionPromise !== undefined && + (obj.inclusionPromise = message.inclusionPromise ? exports.InclusionPromise.toJSON(message.inclusionPromise) : undefined); + message.inclusionProof !== undefined && + (obj.inclusionProof = message.inclusionProof ? exports.InclusionProof.toJSON(message.inclusionProof) : undefined); + message.canonicalizedBody !== undefined && + (obj.canonicalizedBody = base64FromBytes(message.canonicalizedBody !== undefined ? 
message.canonicalizedBody : Buffer.alloc(0))); + return obj; + }, +}; +var tsProtoGlobalThis = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); +function bytesFromBase64(b64) { + if (tsProtoGlobalThis.Buffer) { + return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64")); + } + else { + const bin = tsProtoGlobalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} +function base64FromBytes(arr) { + if (tsProtoGlobalThis.Buffer) { + return tsProtoGlobalThis.Buffer.from(arr).toString("base64"); + } + else { + const bin = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return tsProtoGlobalThis.btoa(bin.join("")); + } +} +function isSet(value) { + return value !== null && value !== undefined; +} diff --git a/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js new file mode 100644 index 0000000000000..05e566767cdb2 --- /dev/null +++ b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js @@ -0,0 +1,103 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TrustedRoot = exports.CertificateAuthority = exports.TransparencyLogInstance = void 0; +/* eslint-disable */ +const sigstore_common_1 = require("./sigstore_common"); +function createBaseTransparencyLogInstance() { + return { baseUrl: "", hashAlgorithm: 0, publicKey: undefined, logId: undefined }; +} +exports.TransparencyLogInstance = { + fromJSON(object) { + return { + baseUrl: isSet(object.baseUrl) ? String(object.baseUrl) : "", + hashAlgorithm: isSet(object.hashAlgorithm) ? (0, sigstore_common_1.hashAlgorithmFromJSON)(object.hashAlgorithm) : 0, + publicKey: isSet(object.publicKey) ? sigstore_common_1.PublicKey.fromJSON(object.publicKey) : undefined, + logId: isSet(object.logId) ? sigstore_common_1.LogId.fromJSON(object.logId) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.baseUrl !== undefined && (obj.baseUrl = message.baseUrl); + message.hashAlgorithm !== undefined && (obj.hashAlgorithm = (0, sigstore_common_1.hashAlgorithmToJSON)(message.hashAlgorithm)); + message.publicKey !== undefined && + (obj.publicKey = message.publicKey ? sigstore_common_1.PublicKey.toJSON(message.publicKey) : undefined); + message.logId !== undefined && (obj.logId = message.logId ? sigstore_common_1.LogId.toJSON(message.logId) : undefined); + return obj; + }, +}; +function createBaseCertificateAuthority() { + return { subject: undefined, uri: "", certChain: undefined, validFor: undefined }; +} +exports.CertificateAuthority = { + fromJSON(object) { + return { + subject: isSet(object.subject) ? sigstore_common_1.DistinguishedName.fromJSON(object.subject) : undefined, + uri: isSet(object.uri) ? String(object.uri) : "", + certChain: isSet(object.certChain) ? sigstore_common_1.X509CertificateChain.fromJSON(object.certChain) : undefined, + validFor: isSet(object.validFor) ? 
sigstore_common_1.TimeRange.fromJSON(object.validFor) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.subject !== undefined && + (obj.subject = message.subject ? sigstore_common_1.DistinguishedName.toJSON(message.subject) : undefined); + message.uri !== undefined && (obj.uri = message.uri); + message.certChain !== undefined && + (obj.certChain = message.certChain ? sigstore_common_1.X509CertificateChain.toJSON(message.certChain) : undefined); + message.validFor !== undefined && + (obj.validFor = message.validFor ? sigstore_common_1.TimeRange.toJSON(message.validFor) : undefined); + return obj; + }, +}; +function createBaseTrustedRoot() { + return { mediaType: "", tlogs: [], certificateAuthorities: [], ctlogs: [], timestampAuthorities: [] }; +} +exports.TrustedRoot = { + fromJSON(object) { + return { + mediaType: isSet(object.mediaType) ? String(object.mediaType) : "", + tlogs: Array.isArray(object?.tlogs) ? object.tlogs.map((e) => exports.TransparencyLogInstance.fromJSON(e)) : [], + certificateAuthorities: Array.isArray(object?.certificateAuthorities) + ? object.certificateAuthorities.map((e) => exports.CertificateAuthority.fromJSON(e)) + : [], + ctlogs: Array.isArray(object?.ctlogs) + ? object.ctlogs.map((e) => exports.TransparencyLogInstance.fromJSON(e)) + : [], + timestampAuthorities: Array.isArray(object?.timestampAuthorities) + ? object.timestampAuthorities.map((e) => exports.CertificateAuthority.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + message.mediaType !== undefined && (obj.mediaType = message.mediaType); + if (message.tlogs) { + obj.tlogs = message.tlogs.map((e) => e ? exports.TransparencyLogInstance.toJSON(e) : undefined); + } + else { + obj.tlogs = []; + } + if (message.certificateAuthorities) { + obj.certificateAuthorities = message.certificateAuthorities.map((e) => e ? exports.CertificateAuthority.toJSON(e) : undefined); + } + else { + obj.certificateAuthorities = []; + } + if (message.ctlogs) { + obj.ctlogs = message.ctlogs.map((e) => e ? exports.TransparencyLogInstance.toJSON(e) : undefined); + } + else { + obj.ctlogs = []; + } + if (message.timestampAuthorities) { + obj.timestampAuthorities = message.timestampAuthorities.map((e) => e ? 
exports.CertificateAuthority.toJSON(e) : undefined); + } + else { + obj.timestampAuthorities = []; + } + return obj; + }, +}; +function isSet(value) { + return value !== null && value !== undefined; +} diff --git a/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js new file mode 100644 index 0000000000000..8a72b89761869 --- /dev/null +++ b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js @@ -0,0 +1,273 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Input = exports.Artifact = exports.ArtifactVerificationOptions_TimestampAuthorityOptions = exports.ArtifactVerificationOptions_CtlogOptions = exports.ArtifactVerificationOptions_TlogOptions = exports.ArtifactVerificationOptions = exports.PublicKeyIdentities = exports.CertificateIdentities = exports.CertificateIdentity = void 0; +/* eslint-disable */ +const sigstore_bundle_1 = require("./sigstore_bundle"); +const sigstore_common_1 = require("./sigstore_common"); +const sigstore_trustroot_1 = require("./sigstore_trustroot"); +function createBaseCertificateIdentity() { + return { issuer: "", san: undefined, oids: [] }; +} +exports.CertificateIdentity = { + fromJSON(object) { + return { + issuer: isSet(object.issuer) ? String(object.issuer) : "", + san: isSet(object.san) ? sigstore_common_1.SubjectAlternativeName.fromJSON(object.san) : undefined, + oids: Array.isArray(object?.oids) ? object.oids.map((e) => sigstore_common_1.ObjectIdentifierValuePair.fromJSON(e)) : [], + }; + }, + toJSON(message) { + const obj = {}; + message.issuer !== undefined && (obj.issuer = message.issuer); + message.san !== undefined && (obj.san = message.san ? sigstore_common_1.SubjectAlternativeName.toJSON(message.san) : undefined); + if (message.oids) { + obj.oids = message.oids.map((e) => e ? sigstore_common_1.ObjectIdentifierValuePair.toJSON(e) : undefined); + } + else { + obj.oids = []; + } + return obj; + }, +}; +function createBaseCertificateIdentities() { + return { identities: [] }; +} +exports.CertificateIdentities = { + fromJSON(object) { + return { + identities: Array.isArray(object?.identities) + ? object.identities.map((e) => exports.CertificateIdentity.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.identities) { + obj.identities = message.identities.map((e) => e ? exports.CertificateIdentity.toJSON(e) : undefined); + } + else { + obj.identities = []; + } + return obj; + }, +}; +function createBasePublicKeyIdentities() { + return { publicKeys: [] }; +} +exports.PublicKeyIdentities = { + fromJSON(object) { + return { + publicKeys: Array.isArray(object?.publicKeys) ? object.publicKeys.map((e) => sigstore_common_1.PublicKey.fromJSON(e)) : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.publicKeys) { + obj.publicKeys = message.publicKeys.map((e) => e ? sigstore_common_1.PublicKey.toJSON(e) : undefined); + } + else { + obj.publicKeys = []; + } + return obj; + }, +}; +function createBaseArtifactVerificationOptions() { + return { signers: undefined, tlogOptions: undefined, ctlogOptions: undefined, tsaOptions: undefined }; +} +exports.ArtifactVerificationOptions = { + fromJSON(object) { + return { + signers: isSet(object.certificateIdentities) + ? 
{ + $case: "certificateIdentities", + certificateIdentities: exports.CertificateIdentities.fromJSON(object.certificateIdentities), + } + : isSet(object.publicKeys) + ? { $case: "publicKeys", publicKeys: exports.PublicKeyIdentities.fromJSON(object.publicKeys) } + : undefined, + tlogOptions: isSet(object.tlogOptions) + ? exports.ArtifactVerificationOptions_TlogOptions.fromJSON(object.tlogOptions) + : undefined, + ctlogOptions: isSet(object.ctlogOptions) + ? exports.ArtifactVerificationOptions_CtlogOptions.fromJSON(object.ctlogOptions) + : undefined, + tsaOptions: isSet(object.tsaOptions) + ? exports.ArtifactVerificationOptions_TimestampAuthorityOptions.fromJSON(object.tsaOptions) + : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.signers?.$case === "certificateIdentities" && + (obj.certificateIdentities = message.signers?.certificateIdentities + ? exports.CertificateIdentities.toJSON(message.signers?.certificateIdentities) + : undefined); + message.signers?.$case === "publicKeys" && (obj.publicKeys = message.signers?.publicKeys + ? exports.PublicKeyIdentities.toJSON(message.signers?.publicKeys) + : undefined); + message.tlogOptions !== undefined && (obj.tlogOptions = message.tlogOptions + ? exports.ArtifactVerificationOptions_TlogOptions.toJSON(message.tlogOptions) + : undefined); + message.ctlogOptions !== undefined && (obj.ctlogOptions = message.ctlogOptions + ? exports.ArtifactVerificationOptions_CtlogOptions.toJSON(message.ctlogOptions) + : undefined); + message.tsaOptions !== undefined && (obj.tsaOptions = message.tsaOptions + ? exports.ArtifactVerificationOptions_TimestampAuthorityOptions.toJSON(message.tsaOptions) + : undefined); + return obj; + }, +}; +function createBaseArtifactVerificationOptions_TlogOptions() { + return { threshold: 0, performOnlineVerification: false, disable: false }; +} +exports.ArtifactVerificationOptions_TlogOptions = { + fromJSON(object) { + return { + threshold: isSet(object.threshold) ? Number(object.threshold) : 0, + performOnlineVerification: isSet(object.performOnlineVerification) + ? Boolean(object.performOnlineVerification) + : false, + disable: isSet(object.disable) ? Boolean(object.disable) : false, + }; + }, + toJSON(message) { + const obj = {}; + message.threshold !== undefined && (obj.threshold = Math.round(message.threshold)); + message.performOnlineVerification !== undefined && + (obj.performOnlineVerification = message.performOnlineVerification); + message.disable !== undefined && (obj.disable = message.disable); + return obj; + }, +}; +function createBaseArtifactVerificationOptions_CtlogOptions() { + return { threshold: 0, detachedSct: false, disable: false }; +} +exports.ArtifactVerificationOptions_CtlogOptions = { + fromJSON(object) { + return { + threshold: isSet(object.threshold) ? Number(object.threshold) : 0, + detachedSct: isSet(object.detachedSct) ? Boolean(object.detachedSct) : false, + disable: isSet(object.disable) ? Boolean(object.disable) : false, + }; + }, + toJSON(message) { + const obj = {}; + message.threshold !== undefined && (obj.threshold = Math.round(message.threshold)); + message.detachedSct !== undefined && (obj.detachedSct = message.detachedSct); + message.disable !== undefined && (obj.disable = message.disable); + return obj; + }, +}; +function createBaseArtifactVerificationOptions_TimestampAuthorityOptions() { + return { threshold: 0, disable: false }; +} +exports.ArtifactVerificationOptions_TimestampAuthorityOptions = { + fromJSON(object) { + return { + threshold: isSet(object.threshold) ? 
Number(object.threshold) : 0, + disable: isSet(object.disable) ? Boolean(object.disable) : false, + }; + }, + toJSON(message) { + const obj = {}; + message.threshold !== undefined && (obj.threshold = Math.round(message.threshold)); + message.disable !== undefined && (obj.disable = message.disable); + return obj; + }, +}; +function createBaseArtifact() { + return { data: undefined }; +} +exports.Artifact = { + fromJSON(object) { + return { + data: isSet(object.artifactUri) + ? { $case: "artifactUri", artifactUri: String(object.artifactUri) } + : isSet(object.artifact) + ? { $case: "artifact", artifact: Buffer.from(bytesFromBase64(object.artifact)) } + : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.data?.$case === "artifactUri" && (obj.artifactUri = message.data?.artifactUri); + message.data?.$case === "artifact" && + (obj.artifact = message.data?.artifact !== undefined ? base64FromBytes(message.data?.artifact) : undefined); + return obj; + }, +}; +function createBaseInput() { + return { + artifactTrustRoot: undefined, + artifactVerificationOptions: undefined, + bundle: undefined, + artifact: undefined, + }; +} +exports.Input = { + fromJSON(object) { + return { + artifactTrustRoot: isSet(object.artifactTrustRoot) ? sigstore_trustroot_1.TrustedRoot.fromJSON(object.artifactTrustRoot) : undefined, + artifactVerificationOptions: isSet(object.artifactVerificationOptions) + ? exports.ArtifactVerificationOptions.fromJSON(object.artifactVerificationOptions) + : undefined, + bundle: isSet(object.bundle) ? sigstore_bundle_1.Bundle.fromJSON(object.bundle) : undefined, + artifact: isSet(object.artifact) ? exports.Artifact.fromJSON(object.artifact) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.artifactTrustRoot !== undefined && + (obj.artifactTrustRoot = message.artifactTrustRoot ? sigstore_trustroot_1.TrustedRoot.toJSON(message.artifactTrustRoot) : undefined); + message.artifactVerificationOptions !== undefined && + (obj.artifactVerificationOptions = message.artifactVerificationOptions + ? exports.ArtifactVerificationOptions.toJSON(message.artifactVerificationOptions) + : undefined); + message.bundle !== undefined && (obj.bundle = message.bundle ? sigstore_bundle_1.Bundle.toJSON(message.bundle) : undefined); + message.artifact !== undefined && (obj.artifact = message.artifact ? 
exports.Artifact.toJSON(message.artifact) : undefined); + return obj; + }, +}; +var tsProtoGlobalThis = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); +function bytesFromBase64(b64) { + if (tsProtoGlobalThis.Buffer) { + return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64")); + } + else { + const bin = tsProtoGlobalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} +function base64FromBytes(arr) { + if (tsProtoGlobalThis.Buffer) { + return tsProtoGlobalThis.Buffer.from(arr).toString("base64"); + } + else { + const bin = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return tsProtoGlobalThis.btoa(bin.join("")); + } +} +function isSet(value) { + return value !== null && value !== undefined; +} diff --git a/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/index.js b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/index.js new file mode 100644 index 0000000000000..eafb768c48fca --- /dev/null +++ b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/index.js @@ -0,0 +1,37 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +__exportStar(require("./__generated__/envelope"), exports); +__exportStar(require("./__generated__/sigstore_bundle"), exports); +__exportStar(require("./__generated__/sigstore_common"), exports); +__exportStar(require("./__generated__/sigstore_rekor"), exports); +__exportStar(require("./__generated__/sigstore_trustroot"), exports); +__exportStar(require("./__generated__/sigstore_verification"), exports); diff --git a/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/package.json b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/package.json new file mode 100644 index 0000000000000..450abb157f31a --- /dev/null +++ b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/package.json @@ -0,0 +1,31 @@ +{ + "name": "@sigstore/protobuf-specs", + "version": "0.2.1", + "description": "code-signing for npm packages", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "scripts": { + "build": "tsc" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/sigstore/protobuf-specs.git" + }, + "files": [ + "dist" + ], + "author": "bdehamer@github.com", + "license": "Apache-2.0", + "bugs": { + "url": "https://github.com/sigstore/protobuf-specs/issues" + }, + "homepage": "https://github.com/sigstore/protobuf-specs#readme", + "devDependencies": { + "@tsconfig/node14": "^1.0.3", + "@types/node": "^18.14.0", + "typescript": "^4.9.5" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } +} diff --git a/node_modules/@sigstore/tuf/package.json b/node_modules/@sigstore/tuf/package.json index 286d481a4d39f..4c534fa766a5e 100644 --- a/node_modules/@sigstore/tuf/package.json +++ b/node_modules/@sigstore/tuf/package.json @@ -1,6 +1,6 @@ { "name": "@sigstore/tuf", - "version": "1.0.2", + "version": "2.0.0", "description": "Client for the Sigstore TUF repository", "main": "dist/index.js", "types": "dist/index.d.ts", @@ -28,14 +28,14 @@ }, "devDependencies": { "@sigstore/jest": "^0.0.0", - "@tufjs/repo-mock": "^1.1.0", + "@tufjs/repo-mock": "^2.0.0", "@types/make-fetch-happen": "^10.0.0" }, "dependencies": { - "@sigstore/protobuf-specs": "^0.1.0", - "tuf-js": "^1.1.7" + "@sigstore/protobuf-specs": "^0.2.1", + "tuf-js": "^2.0.0" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.14.0 || >=18.0.0" } } diff --git a/node_modules/@tufjs/canonical-json/package.json b/node_modules/@tufjs/canonical-json/package.json index 688c9b93c3a4e..886c0c3969225 100644 --- a/node_modules/@tufjs/canonical-json/package.json +++ b/node_modules/@tufjs/canonical-json/package.json @@ -1,6 +1,6 @@ { "name": "@tufjs/canonical-json", - "version": "1.0.0", + "version": "2.0.0", "description": "OLPC JSON canonicalization", "main": "lib/index.js", "typings": "lib/index.d.ts", @@ -19,7 +19,7 @@ "type": "git", "url": "git+https://github.com/theupdateframework/tuf-js.git" }, - "homepage": "https://github.com/theupdateframework/tuf-js/packages/canonical-json#readme", + "homepage": "https://github.com/theupdateframework/tuf-js/tree/main/packages/canonical-json#readme", "bugs": { "url": "https://github.com/theupdateframework/tuf-js/issues" }, @@ -29,11 +29,7 @@ "scripts": { "test": "jest" }, - "devDependencies": { - "@types/node": "^18.14.1", - "typescript": "^4.9.5" - }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.14.0 || >=18.0.0" } } diff --git a/node_modules/@tufjs/models/dist/base.js b/node_modules/@tufjs/models/dist/base.js index d89a089c33092..259f6799c13a0 100644 --- a/node_modules/@tufjs/models/dist/base.js 
+++ b/node_modules/@tufjs/models/dist/base.js @@ -14,7 +14,7 @@ var MetadataKind; MetadataKind["Timestamp"] = "timestamp"; MetadataKind["Snapshot"] = "snapshot"; MetadataKind["Targets"] = "targets"; -})(MetadataKind = exports.MetadataKind || (exports.MetadataKind = {})); +})(MetadataKind || (exports.MetadataKind = MetadataKind = {})); function isMetadataKind(value) { return (typeof value === 'string' && Object.values(MetadataKind).includes(value)); diff --git a/node_modules/@tufjs/models/package.json b/node_modules/@tufjs/models/package.json index 6711ee0dababc..60368242ab556 100644 --- a/node_modules/@tufjs/models/package.json +++ b/node_modules/@tufjs/models/package.json @@ -1,6 +1,6 @@ { "name": "@tufjs/models", - "version": "1.0.4", + "version": "2.0.0", "description": "TUF metadata models", "main": "dist/index.js", "types": "dist/index.d.ts", @@ -27,15 +27,11 @@ "url": "https://github.com/theupdateframework/tuf-js/issues" }, "homepage": "https://github.com/theupdateframework/tuf-js/tree/main/packages/models#readme", - "devDependencies": { - "@types/node": "^18.16.3", - "typescript": "^5.0.4" - }, "dependencies": { - "@tufjs/canonical-json": "1.0.0", - "minimatch": "^9.0.0" + "@tufjs/canonical-json": "2.0.0", + "minimatch": "^9.0.3" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.14.0 || >=18.0.0" } } diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/LICENSE b/node_modules/sigstore/node_modules/@sigstore/tuf/LICENSE new file mode 100644 index 0000000000000..e9e7c1679a09d --- /dev/null +++ b/node_modules/sigstore/node_modules/@sigstore/tuf/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2023 The Sigstore Authors + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/dist/appdata.js b/node_modules/sigstore/node_modules/@sigstore/tuf/dist/appdata.js new file mode 100644 index 0000000000000..c9a8ee92b531e --- /dev/null +++ b/node_modules/sigstore/node_modules/@sigstore/tuf/dist/appdata.js @@ -0,0 +1,44 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.appDataPath = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const os_1 = __importDefault(require("os")); +const path_1 = __importDefault(require("path")); +function appDataPath(name) { + const homedir = os_1.default.homedir(); + switch (process.platform) { + /* istanbul ignore next */ + case 'darwin': { + const appSupport = path_1.default.join(homedir, 'Library', 'Application Support'); + return path_1.default.join(appSupport, name); + } + /* istanbul ignore next */ + case 'win32': { + const localAppData = process.env.LOCALAPPDATA || path_1.default.join(homedir, 'AppData', 'Local'); + return path_1.default.join(localAppData, name, 'Data'); + } + /* istanbul ignore next */ + default: { + const localData = process.env.XDG_DATA_HOME || path_1.default.join(homedir, '.local', 'share'); + return path_1.default.join(localData, name); + } + } +} +exports.appDataPath = appDataPath; diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/dist/client.js b/node_modules/sigstore/node_modules/@sigstore/tuf/dist/client.js new file mode 100644 index 0000000000000..08d6b61840909 --- /dev/null +++ b/node_modules/sigstore/node_modules/@sigstore/tuf/dist/client.js @@ -0,0 +1,101 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TUFClient = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const fs_1 = __importDefault(require("fs")); +const path_1 = __importDefault(require("path")); +const tuf_js_1 = require("tuf-js"); +const target_1 = require("./target"); +class TUFClient { + constructor(options) { + initTufCache(options.cachePath, options.rootPath); + const remote = initRemoteConfig(options.cachePath, options.mirrorURL); + this.updater = initClient(options.cachePath, remote, options); + } + async refresh() { + return this.updater.refresh(); + } + getTarget(targetName) { + return (0, target_1.readTarget)(this.updater, targetName); + } +} +exports.TUFClient = TUFClient; +// Initializes the TUF cache directory structure including the initial +// root.json file. If the cache directory does not exist, it will be +// created. If the targets directory does not exist, it will be created. +// If the root.json file does not exist, it will be copied from the +// rootPath argument. 
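+// A minimal sketch of how the helpers in this file fit together (the
+// paths shown are hypothetical; TUFClient is the class defined above):
+//
+//   const client = new TUFClient({
+//     cachePath: '/tmp/sigstore-cache',    // seeded by initTufCache
+//     rootPath: '/tmp/initial-root.json',  // copied to <cachePath>/root.json
+//     mirrorURL: 'https://tuf-repo-cdn.sigstore.dev',
+//   });
+//   await client.refresh();                // refresh cached TUF metadata
+//
+// After construction the cache directory holds root.json, a targets/
+// subdirectory, and the remote.json written by initRemoteConfig.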
+function initTufCache(cachePath, tufRootPath) { + const targetsPath = path_1.default.join(cachePath, 'targets'); + const cachedRootPath = path_1.default.join(cachePath, 'root.json'); + if (!fs_1.default.existsSync(cachePath)) { + fs_1.default.mkdirSync(cachePath, { recursive: true }); + } + if (!fs_1.default.existsSync(targetsPath)) { + fs_1.default.mkdirSync(targetsPath); + } + if (!fs_1.default.existsSync(cachedRootPath)) { + fs_1.default.copyFileSync(tufRootPath, cachedRootPath); + } + return cachePath; +} +// Initializes the remote.json file, which contains the URL of the TUF +// repository. If the file does not exist, it will be created. If the file +// exists, it will be parsed and returned. +function initRemoteConfig(rootDir, mirrorURL) { + let remoteConfig; + const remoteConfigPath = path_1.default.join(rootDir, 'remote.json'); + if (fs_1.default.existsSync(remoteConfigPath)) { + const data = fs_1.default.readFileSync(remoteConfigPath, 'utf-8'); + remoteConfig = JSON.parse(data); + } + if (!remoteConfig) { + remoteConfig = { mirror: mirrorURL }; + fs_1.default.writeFileSync(remoteConfigPath, JSON.stringify(remoteConfig)); + } + return remoteConfig; +} +function initClient(cachePath, remote, options) { + const baseURL = remote.mirror; + const config = { + fetchTimeout: options.timeout, + }; + // tuf-js only supports a number for fetchRetries so we have to + // convert the boolean and object options to a number. + /* istanbul ignore if */ + if (typeof options.retry !== 'undefined') { + if (typeof options.retry === 'number') { + config.fetchRetries = options.retry; + } + else if (typeof options.retry === 'object') { + config.fetchRetries = options.retry.retries; + } + else if (options.retry === true) { + config.fetchRetries = 1; + } + } + return new tuf_js_1.Updater({ + metadataBaseUrl: baseURL, + targetBaseUrl: `${baseURL}/targets`, + metadataDir: cachePath, + targetDir: path_1.default.join(cachePath, 'targets'), + config, + }); +} diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/dist/error.js b/node_modules/sigstore/node_modules/@sigstore/tuf/dist/error.js new file mode 100644 index 0000000000000..e13971b289ff2 --- /dev/null +++ b/node_modules/sigstore/node_modules/@sigstore/tuf/dist/error.js @@ -0,0 +1,12 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TUFError = void 0; +class TUFError extends Error { + constructor({ code, message, cause, }) { + super(message); + this.code = code; + this.cause = cause; + this.name = this.constructor.name; + } +} +exports.TUFError = TUFError; diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/dist/index.js b/node_modules/sigstore/node_modules/@sigstore/tuf/dist/index.js new file mode 100644 index 0000000000000..0d201c356dffc --- /dev/null +++ b/node_modules/sigstore/node_modules/@sigstore/tuf/dist/index.js @@ -0,0 +1,55 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TUFError = exports.initTUF = exports.getTrustedRoot = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +*/ +const protobuf_specs_1 = require("@sigstore/protobuf-specs"); +const appdata_1 = require("./appdata"); +const client_1 = require("./client"); +const DEFAULT_CACHE_DIR = 'sigstore-js'; +const DEFAULT_MIRROR_URL = 'https://tuf-repo-cdn.sigstore.dev'; +const DEFAULT_TUF_ROOT_PATH = '../store/public-good-instance-root.json'; +const DEFAULT_RETRY = { retries: 2 }; +const DEFAULT_TIMEOUT = 5000; +const TRUSTED_ROOT_TARGET = 'trusted_root.json'; +async function getTrustedRoot( +/* istanbul ignore next */ +options = {}) { + const client = createClient(options); + const trustedRoot = await client.getTarget(TRUSTED_ROOT_TARGET); + return protobuf_specs_1.TrustedRoot.fromJSON(JSON.parse(trustedRoot)); +} +exports.getTrustedRoot = getTrustedRoot; +async function initTUF( +/* istanbul ignore next */ +options = {}) { + const client = createClient(options); + return client.refresh().then(() => client); +} +exports.initTUF = initTUF; +// Create a TUF client with default options +function createClient(options) { + /* istanbul ignore next */ + return new client_1.TUFClient({ + cachePath: options.cachePath || (0, appdata_1.appDataPath)(DEFAULT_CACHE_DIR), + rootPath: options.rootPath || require.resolve(DEFAULT_TUF_ROOT_PATH), + mirrorURL: options.mirrorURL || DEFAULT_MIRROR_URL, + retry: options.retry ?? DEFAULT_RETRY, + timeout: options.timeout ?? DEFAULT_TIMEOUT, + }); +} +var error_1 = require("./error"); +Object.defineProperty(exports, "TUFError", { enumerable: true, get: function () { return error_1.TUFError; } }); diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/dist/target.js b/node_modules/sigstore/node_modules/@sigstore/tuf/dist/target.js new file mode 100644 index 0000000000000..29eaf99a7e721 --- /dev/null +++ b/node_modules/sigstore/node_modules/@sigstore/tuf/dist/target.js @@ -0,0 +1,80 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.readTarget = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const fs_1 = __importDefault(require("fs")); +const error_1 = require("./error"); +// Downloads and returns the specified target from the provided TUF Updater. +async function readTarget(tuf, targetPath) { + const path = await getTargetPath(tuf, targetPath); + return new Promise((resolve, reject) => { + fs_1.default.readFile(path, 'utf-8', (err, data) => { + if (err) { + reject(new error_1.TUFError({ + code: 'TUF_READ_TARGET_ERROR', + message: `error reading target ${path}`, + cause: err, + })); + } + else { + resolve(data); + } + }); + }); +} +exports.readTarget = readTarget; +// Returns the local path to the specified target. If the target is not yet +// cached locally, the provided TUF Updater will be used to download and +// cache the target. 
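+// As a rough sketch (not normative), the lookup below proceeds:
+//
+//   tuf.getTargetInfo(target)   -> refresh metadata, resolve target info
+//   tuf.findCachedTarget(info)  -> path if a fresh copy is already cached
+//   tuf.downloadTarget(info)    -> otherwise download and cache the target
+//
+// Each step is wrapped in a distinct TUFError code so callers can tell
+// metadata-refresh failures apart from download failures.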
+async function getTargetPath(tuf, target) { + let targetInfo; + try { + targetInfo = await tuf.getTargetInfo(target); + } + catch (err) { + throw new error_1.TUFError({ + code: 'TUF_REFRESH_METADATA_ERROR', + message: 'error refreshing TUF metadata', + cause: err, + }); + } + if (!targetInfo) { + throw new error_1.TUFError({ + code: 'TUF_FIND_TARGET_ERROR', + message: `target ${target} not found`, + }); + } + let path = await tuf.findCachedTarget(targetInfo); + // An empty path here means the target has not been cached locally, or is + // out of date. In either case, we need to download it. + if (!path) { + try { + path = await tuf.downloadTarget(targetInfo); + } + catch (err) { + throw new error_1.TUFError({ + code: 'TUF_DOWNLOAD_TARGET_ERROR', + message: `error downloading target ${path}`, + cause: err, + }); + } + } + return path; +} diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/LICENSE b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/LICENSE new file mode 100644 index 0000000000000..e9e7c1679a09d --- /dev/null +++ b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2023 The Sigstore Authors + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js new file mode 100644 index 0000000000000..0c367a8384454 --- /dev/null +++ b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js @@ -0,0 +1,89 @@ +"use strict"; +/* eslint-disable */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Signature = exports.Envelope = void 0; +function createBaseEnvelope() { + return { payload: Buffer.alloc(0), payloadType: "", signatures: [] }; +} +exports.Envelope = { + fromJSON(object) { + return { + payload: isSet(object.payload) ? Buffer.from(bytesFromBase64(object.payload)) : Buffer.alloc(0), + payloadType: isSet(object.payloadType) ? String(object.payloadType) : "", + signatures: Array.isArray(object?.signatures) ? 
object.signatures.map((e) => exports.Signature.fromJSON(e)) : [], + }; + }, + toJSON(message) { + const obj = {}; + message.payload !== undefined && + (obj.payload = base64FromBytes(message.payload !== undefined ? message.payload : Buffer.alloc(0))); + message.payloadType !== undefined && (obj.payloadType = message.payloadType); + if (message.signatures) { + obj.signatures = message.signatures.map((e) => e ? exports.Signature.toJSON(e) : undefined); + } + else { + obj.signatures = []; + } + return obj; + }, +}; +function createBaseSignature() { + return { sig: Buffer.alloc(0), keyid: "" }; +} +exports.Signature = { + fromJSON(object) { + return { + sig: isSet(object.sig) ? Buffer.from(bytesFromBase64(object.sig)) : Buffer.alloc(0), + keyid: isSet(object.keyid) ? String(object.keyid) : "", + }; + }, + toJSON(message) { + const obj = {}; + message.sig !== undefined && (obj.sig = base64FromBytes(message.sig !== undefined ? message.sig : Buffer.alloc(0))); + message.keyid !== undefined && (obj.keyid = message.keyid); + return obj; + }, +}; +var tsProtoGlobalThis = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); +function bytesFromBase64(b64) { + if (tsProtoGlobalThis.Buffer) { + return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64")); + } + else { + const bin = tsProtoGlobalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} +function base64FromBytes(arr) { + if (tsProtoGlobalThis.Buffer) { + return tsProtoGlobalThis.Buffer.from(arr).toString("base64"); + } + else { + const bin = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return tsProtoGlobalThis.btoa(bin.join("")); + } +} +function isSet(value) { + return value !== null && value !== undefined; +} diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js new file mode 100644 index 0000000000000..073093b8371a8 --- /dev/null +++ b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js @@ -0,0 +1,185 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.CloudEventBatch = exports.CloudEvent_CloudEventAttributeValue = exports.CloudEvent_AttributesEntry = exports.CloudEvent = void 0; +/* eslint-disable */ +const any_1 = require("./google/protobuf/any"); +const timestamp_1 = require("./google/protobuf/timestamp"); +function createBaseCloudEvent() { + return { id: "", source: "", specVersion: "", type: "", attributes: {}, data: undefined }; +} +exports.CloudEvent = { + fromJSON(object) { + return { + id: isSet(object.id) ? String(object.id) : "", + source: isSet(object.source) ? String(object.source) : "", + specVersion: isSet(object.specVersion) ? String(object.specVersion) : "", + type: isSet(object.type) ? String(object.type) : "", + attributes: isObject(object.attributes) + ? 
Object.entries(object.attributes).reduce((acc, [key, value]) => { + acc[key] = exports.CloudEvent_CloudEventAttributeValue.fromJSON(value); + return acc; + }, {}) + : {}, + data: isSet(object.binaryData) + ? { $case: "binaryData", binaryData: Buffer.from(bytesFromBase64(object.binaryData)) } + : isSet(object.textData) + ? { $case: "textData", textData: String(object.textData) } + : isSet(object.protoData) + ? { $case: "protoData", protoData: any_1.Any.fromJSON(object.protoData) } + : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.id !== undefined && (obj.id = message.id); + message.source !== undefined && (obj.source = message.source); + message.specVersion !== undefined && (obj.specVersion = message.specVersion); + message.type !== undefined && (obj.type = message.type); + obj.attributes = {}; + if (message.attributes) { + Object.entries(message.attributes).forEach(([k, v]) => { + obj.attributes[k] = exports.CloudEvent_CloudEventAttributeValue.toJSON(v); + }); + } + message.data?.$case === "binaryData" && + (obj.binaryData = message.data?.binaryData !== undefined ? base64FromBytes(message.data?.binaryData) : undefined); + message.data?.$case === "textData" && (obj.textData = message.data?.textData); + message.data?.$case === "protoData" && + (obj.protoData = message.data?.protoData ? any_1.Any.toJSON(message.data?.protoData) : undefined); + return obj; + }, +}; +function createBaseCloudEvent_AttributesEntry() { + return { key: "", value: undefined }; +} +exports.CloudEvent_AttributesEntry = { + fromJSON(object) { + return { + key: isSet(object.key) ? String(object.key) : "", + value: isSet(object.value) ? exports.CloudEvent_CloudEventAttributeValue.fromJSON(object.value) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.key !== undefined && (obj.key = message.key); + message.value !== undefined && + (obj.value = message.value ? exports.CloudEvent_CloudEventAttributeValue.toJSON(message.value) : undefined); + return obj; + }, +}; +function createBaseCloudEvent_CloudEventAttributeValue() { + return { attr: undefined }; +} +exports.CloudEvent_CloudEventAttributeValue = { + fromJSON(object) { + return { + attr: isSet(object.ceBoolean) + ? { $case: "ceBoolean", ceBoolean: Boolean(object.ceBoolean) } + : isSet(object.ceInteger) + ? { $case: "ceInteger", ceInteger: Number(object.ceInteger) } + : isSet(object.ceString) + ? { $case: "ceString", ceString: String(object.ceString) } + : isSet(object.ceBytes) + ? { $case: "ceBytes", ceBytes: Buffer.from(bytesFromBase64(object.ceBytes)) } + : isSet(object.ceUri) + ? { $case: "ceUri", ceUri: String(object.ceUri) } + : isSet(object.ceUriRef) + ? { $case: "ceUriRef", ceUriRef: String(object.ceUriRef) } + : isSet(object.ceTimestamp) + ? { $case: "ceTimestamp", ceTimestamp: fromJsonTimestamp(object.ceTimestamp) } + : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.attr?.$case === "ceBoolean" && (obj.ceBoolean = message.attr?.ceBoolean); + message.attr?.$case === "ceInteger" && (obj.ceInteger = Math.round(message.attr?.ceInteger)); + message.attr?.$case === "ceString" && (obj.ceString = message.attr?.ceString); + message.attr?.$case === "ceBytes" && + (obj.ceBytes = message.attr?.ceBytes !== undefined ? 
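+ // Illustrative sketch: the `attr` oneof is modeled as a tagged union, so
+ // exactly one ce* key survives a JSON round-trip; ceTimestamp parses from an
+ // ISO string or a protobuf {seconds, nanos} pair
+ // (millis = seconds * 1000 + nanos / 1e6) and serializes via toISOString().
+ // For example:
+ //
+ //   const v = exports.CloudEvent_CloudEventAttributeValue.fromJSON({ ceString: "hi" });
+ //   // v => { attr: { $case: "ceString", ceString: "hi" } }
+ //   exports.CloudEvent_CloudEventAttributeValue.toJSON(v); // => { ceString: "hi" }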
base64FromBytes(message.attr?.ceBytes) : undefined); + message.attr?.$case === "ceUri" && (obj.ceUri = message.attr?.ceUri); + message.attr?.$case === "ceUriRef" && (obj.ceUriRef = message.attr?.ceUriRef); + message.attr?.$case === "ceTimestamp" && (obj.ceTimestamp = message.attr?.ceTimestamp.toISOString()); + return obj; + }, +}; +function createBaseCloudEventBatch() { + return { events: [] }; +} +exports.CloudEventBatch = { + fromJSON(object) { + return { events: Array.isArray(object?.events) ? object.events.map((e) => exports.CloudEvent.fromJSON(e)) : [] }; + }, + toJSON(message) { + const obj = {}; + if (message.events) { + obj.events = message.events.map((e) => e ? exports.CloudEvent.toJSON(e) : undefined); + } + else { + obj.events = []; + } + return obj; + }, +}; +var tsProtoGlobalThis = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); +function bytesFromBase64(b64) { + if (tsProtoGlobalThis.Buffer) { + return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64")); + } + else { + const bin = tsProtoGlobalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} +function base64FromBytes(arr) { + if (tsProtoGlobalThis.Buffer) { + return tsProtoGlobalThis.Buffer.from(arr).toString("base64"); + } + else { + const bin = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return tsProtoGlobalThis.btoa(bin.join("")); + } +} +function fromTimestamp(t) { + let millis = Number(t.seconds) * 1000; + millis += t.nanos / 1000000; + return new Date(millis); +} +function fromJsonTimestamp(o) { + if (o instanceof Date) { + return o; + } + else if (typeof o === "string") { + return new Date(o); + } + else { + return fromTimestamp(timestamp_1.Timestamp.fromJSON(o)); + } +} +function isObject(value) { + return typeof value === "object" && value !== null; +} +function isSet(value) { + return value !== null && value !== undefined; +} diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js new file mode 100644 index 0000000000000..da627499ad765 --- /dev/null +++ b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js @@ -0,0 +1,119 @@ +"use strict"; +/* eslint-disable */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.fieldBehaviorToJSON = exports.fieldBehaviorFromJSON = exports.FieldBehavior = void 0; +/** + * An indicator of the behavior of a given field (for example, that a field + * is required in requests, or given as output but ignored as input). + * This **does not** change the behavior in protocol buffers itself; it only + * denotes the behavior and may affect how API tooling handles the field. + * + * Note: This enum **may** receive new values in the future. + */ +var FieldBehavior; +(function (FieldBehavior) { + /** FIELD_BEHAVIOR_UNSPECIFIED - Conventional default for enums. Do not use this. 
*/ + FieldBehavior[FieldBehavior["FIELD_BEHAVIOR_UNSPECIFIED"] = 0] = "FIELD_BEHAVIOR_UNSPECIFIED"; + /** + * OPTIONAL - Specifically denotes a field as optional. + * While all fields in protocol buffers are optional, this may be specified + * for emphasis if appropriate. + */ + FieldBehavior[FieldBehavior["OPTIONAL"] = 1] = "OPTIONAL"; + /** + * REQUIRED - Denotes a field as required. + * This indicates that the field **must** be provided as part of the request, + * and failure to do so will cause an error (usually `INVALID_ARGUMENT`). + */ + FieldBehavior[FieldBehavior["REQUIRED"] = 2] = "REQUIRED"; + /** + * OUTPUT_ONLY - Denotes a field as output only. + * This indicates that the field is provided in responses, but including the + * field in a request does nothing (the server *must* ignore it and + * *must not* throw an error as a result of the field's presence). + */ + FieldBehavior[FieldBehavior["OUTPUT_ONLY"] = 3] = "OUTPUT_ONLY"; + /** + * INPUT_ONLY - Denotes a field as input only. + * This indicates that the field is provided in requests, and the + * corresponding field is not included in output. + */ + FieldBehavior[FieldBehavior["INPUT_ONLY"] = 4] = "INPUT_ONLY"; + /** + * IMMUTABLE - Denotes a field as immutable. + * This indicates that the field may be set once in a request to create a + * resource, but may not be changed thereafter. + */ + FieldBehavior[FieldBehavior["IMMUTABLE"] = 5] = "IMMUTABLE"; + /** + * UNORDERED_LIST - Denotes that a (repeated) field is an unordered list. + * This indicates that the service may provide the elements of the list + * in any arbitrary order, rather than the order the user originally + * provided. Additionally, the list's order may or may not be stable. + */ + FieldBehavior[FieldBehavior["UNORDERED_LIST"] = 6] = "UNORDERED_LIST"; +})(FieldBehavior = exports.FieldBehavior || (exports.FieldBehavior = {})); +function fieldBehaviorFromJSON(object) { + switch (object) { + case 0: + case "FIELD_BEHAVIOR_UNSPECIFIED": + return FieldBehavior.FIELD_BEHAVIOR_UNSPECIFIED; + case 1: + case "OPTIONAL": + return FieldBehavior.OPTIONAL; + case 2: + case "REQUIRED": + return FieldBehavior.REQUIRED; + case 3: + case "OUTPUT_ONLY": + return FieldBehavior.OUTPUT_ONLY; + case 4: + case "INPUT_ONLY": + return FieldBehavior.INPUT_ONLY; + case 5: + case "IMMUTABLE": + return FieldBehavior.IMMUTABLE; + case 6: + case "UNORDERED_LIST": + return FieldBehavior.UNORDERED_LIST; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior"); + } +} +exports.fieldBehaviorFromJSON = fieldBehaviorFromJSON; +function fieldBehaviorToJSON(object) { + switch (object) { + case FieldBehavior.FIELD_BEHAVIOR_UNSPECIFIED: + return "FIELD_BEHAVIOR_UNSPECIFIED"; + case FieldBehavior.OPTIONAL: + return "OPTIONAL"; + case FieldBehavior.REQUIRED: + return "REQUIRED"; + case FieldBehavior.OUTPUT_ONLY: + return "OUTPUT_ONLY"; + case FieldBehavior.INPUT_ONLY: + return "INPUT_ONLY"; + case FieldBehavior.IMMUTABLE: + return "IMMUTABLE"; + case FieldBehavior.UNORDERED_LIST: + return "UNORDERED_LIST"; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior"); + } +} +exports.fieldBehaviorToJSON = fieldBehaviorToJSON; +var tsProtoGlobalThis = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + 
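+ // Note (illustrative): this fallback chain resolves the host global
+ // (`globalThis`, else `self` in workers, `window` in browsers, `global` in
+ // older Node) so the converters above can throw via the host's `Error`
+ // constructor. For example:
+ //
+ //   fieldBehaviorFromJSON("OUTPUT_ONLY"); // => FieldBehavior.OUTPUT_ONLY (3)
+ //   fieldBehaviorFromJSON(99);            // throws: unrecognized enum value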
throw "Unable to locate global object"; +})(); diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js new file mode 100644 index 0000000000000..6b3f3c97a6647 --- /dev/null +++ b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js @@ -0,0 +1,65 @@ +"use strict"; +/* eslint-disable */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Any = void 0; +function createBaseAny() { + return { typeUrl: "", value: Buffer.alloc(0) }; +} +exports.Any = { + fromJSON(object) { + return { + typeUrl: isSet(object.typeUrl) ? String(object.typeUrl) : "", + value: isSet(object.value) ? Buffer.from(bytesFromBase64(object.value)) : Buffer.alloc(0), + }; + }, + toJSON(message) { + const obj = {}; + message.typeUrl !== undefined && (obj.typeUrl = message.typeUrl); + message.value !== undefined && + (obj.value = base64FromBytes(message.value !== undefined ? message.value : Buffer.alloc(0))); + return obj; + }, +}; +var tsProtoGlobalThis = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); +function bytesFromBase64(b64) { + if (tsProtoGlobalThis.Buffer) { + return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64")); + } + else { + const bin = tsProtoGlobalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} +function base64FromBytes(arr) { + if (tsProtoGlobalThis.Buffer) { + return tsProtoGlobalThis.Buffer.from(arr).toString("base64"); + } + else { + const bin = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return tsProtoGlobalThis.btoa(bin.join("")); + } +} +function isSet(value) { + return value !== null && value !== undefined; +} diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js new file mode 100644 index 0000000000000..d429aac846043 --- /dev/null +++ b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js @@ -0,0 +1,1308 @@ +"use strict"; +/* eslint-disable */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.GeneratedCodeInfo_Annotation = exports.GeneratedCodeInfo = exports.SourceCodeInfo_Location = exports.SourceCodeInfo = exports.UninterpretedOption_NamePart = exports.UninterpretedOption = exports.MethodOptions = exports.ServiceOptions = exports.EnumValueOptions = exports.EnumOptions = exports.OneofOptions = exports.FieldOptions = exports.MessageOptions = exports.FileOptions = exports.MethodDescriptorProto = exports.ServiceDescriptorProto = exports.EnumValueDescriptorProto = exports.EnumDescriptorProto_EnumReservedRange = exports.EnumDescriptorProto = exports.OneofDescriptorProto = exports.FieldDescriptorProto = exports.ExtensionRangeOptions = 
exports.DescriptorProto_ReservedRange = exports.DescriptorProto_ExtensionRange = exports.DescriptorProto = exports.FileDescriptorProto = exports.FileDescriptorSet = exports.methodOptions_IdempotencyLevelToJSON = exports.methodOptions_IdempotencyLevelFromJSON = exports.MethodOptions_IdempotencyLevel = exports.fieldOptions_JSTypeToJSON = exports.fieldOptions_JSTypeFromJSON = exports.FieldOptions_JSType = exports.fieldOptions_CTypeToJSON = exports.fieldOptions_CTypeFromJSON = exports.FieldOptions_CType = exports.fileOptions_OptimizeModeToJSON = exports.fileOptions_OptimizeModeFromJSON = exports.FileOptions_OptimizeMode = exports.fieldDescriptorProto_LabelToJSON = exports.fieldDescriptorProto_LabelFromJSON = exports.FieldDescriptorProto_Label = exports.fieldDescriptorProto_TypeToJSON = exports.fieldDescriptorProto_TypeFromJSON = exports.FieldDescriptorProto_Type = void 0; +var FieldDescriptorProto_Type; +(function (FieldDescriptorProto_Type) { + /** + * TYPE_DOUBLE - 0 is reserved for errors. + * Order is weird for historical reasons. + */ + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_DOUBLE"] = 1] = "TYPE_DOUBLE"; + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FLOAT"] = 2] = "TYPE_FLOAT"; + /** + * TYPE_INT64 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if + * negative values are likely. + */ + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_INT64"] = 3] = "TYPE_INT64"; + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_UINT64"] = 4] = "TYPE_UINT64"; + /** + * TYPE_INT32 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if + * negative values are likely. + */ + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_INT32"] = 5] = "TYPE_INT32"; + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FIXED64"] = 6] = "TYPE_FIXED64"; + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FIXED32"] = 7] = "TYPE_FIXED32"; + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_BOOL"] = 8] = "TYPE_BOOL"; + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_STRING"] = 9] = "TYPE_STRING"; + /** + * TYPE_GROUP - Tag-delimited aggregate. + * Group type is deprecated and not supported in proto3. However, Proto3 + * implementations should still be able to parse the group wire format and + * treat group fields as unknown fields. + */ + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_GROUP"] = 10] = "TYPE_GROUP"; + /** TYPE_MESSAGE - Length-delimited aggregate. */ + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_MESSAGE"] = 11] = "TYPE_MESSAGE"; + /** TYPE_BYTES - New in version 2. */ + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_BYTES"] = 12] = "TYPE_BYTES"; + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_UINT32"] = 13] = "TYPE_UINT32"; + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_ENUM"] = 14] = "TYPE_ENUM"; + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SFIXED32"] = 15] = "TYPE_SFIXED32"; + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SFIXED64"] = 16] = "TYPE_SFIXED64"; + /** TYPE_SINT32 - Uses ZigZag encoding. */ + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SINT32"] = 17] = "TYPE_SINT32"; + /** TYPE_SINT64 - Uses ZigZag encoding. 
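+ * (Illustrative aside: ZigZag maps a signed n to the unsigned
+ * (n << 1) ^ (n >> 63), so 0→0, -1→1, 1→2, -2→3; small negative values then
+ * encode as short varints instead of the 10 bytes a plain TYPE_INT64 spends.)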
*/ + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SINT64"] = 18] = "TYPE_SINT64"; +})(FieldDescriptorProto_Type = exports.FieldDescriptorProto_Type || (exports.FieldDescriptorProto_Type = {})); +function fieldDescriptorProto_TypeFromJSON(object) { + switch (object) { + case 1: + case "TYPE_DOUBLE": + return FieldDescriptorProto_Type.TYPE_DOUBLE; + case 2: + case "TYPE_FLOAT": + return FieldDescriptorProto_Type.TYPE_FLOAT; + case 3: + case "TYPE_INT64": + return FieldDescriptorProto_Type.TYPE_INT64; + case 4: + case "TYPE_UINT64": + return FieldDescriptorProto_Type.TYPE_UINT64; + case 5: + case "TYPE_INT32": + return FieldDescriptorProto_Type.TYPE_INT32; + case 6: + case "TYPE_FIXED64": + return FieldDescriptorProto_Type.TYPE_FIXED64; + case 7: + case "TYPE_FIXED32": + return FieldDescriptorProto_Type.TYPE_FIXED32; + case 8: + case "TYPE_BOOL": + return FieldDescriptorProto_Type.TYPE_BOOL; + case 9: + case "TYPE_STRING": + return FieldDescriptorProto_Type.TYPE_STRING; + case 10: + case "TYPE_GROUP": + return FieldDescriptorProto_Type.TYPE_GROUP; + case 11: + case "TYPE_MESSAGE": + return FieldDescriptorProto_Type.TYPE_MESSAGE; + case 12: + case "TYPE_BYTES": + return FieldDescriptorProto_Type.TYPE_BYTES; + case 13: + case "TYPE_UINT32": + return FieldDescriptorProto_Type.TYPE_UINT32; + case 14: + case "TYPE_ENUM": + return FieldDescriptorProto_Type.TYPE_ENUM; + case 15: + case "TYPE_SFIXED32": + return FieldDescriptorProto_Type.TYPE_SFIXED32; + case 16: + case "TYPE_SFIXED64": + return FieldDescriptorProto_Type.TYPE_SFIXED64; + case 17: + case "TYPE_SINT32": + return FieldDescriptorProto_Type.TYPE_SINT32; + case 18: + case "TYPE_SINT64": + return FieldDescriptorProto_Type.TYPE_SINT64; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type"); + } +} +exports.fieldDescriptorProto_TypeFromJSON = fieldDescriptorProto_TypeFromJSON; +function fieldDescriptorProto_TypeToJSON(object) { + switch (object) { + case FieldDescriptorProto_Type.TYPE_DOUBLE: + return "TYPE_DOUBLE"; + case FieldDescriptorProto_Type.TYPE_FLOAT: + return "TYPE_FLOAT"; + case FieldDescriptorProto_Type.TYPE_INT64: + return "TYPE_INT64"; + case FieldDescriptorProto_Type.TYPE_UINT64: + return "TYPE_UINT64"; + case FieldDescriptorProto_Type.TYPE_INT32: + return "TYPE_INT32"; + case FieldDescriptorProto_Type.TYPE_FIXED64: + return "TYPE_FIXED64"; + case FieldDescriptorProto_Type.TYPE_FIXED32: + return "TYPE_FIXED32"; + case FieldDescriptorProto_Type.TYPE_BOOL: + return "TYPE_BOOL"; + case FieldDescriptorProto_Type.TYPE_STRING: + return "TYPE_STRING"; + case FieldDescriptorProto_Type.TYPE_GROUP: + return "TYPE_GROUP"; + case FieldDescriptorProto_Type.TYPE_MESSAGE: + return "TYPE_MESSAGE"; + case FieldDescriptorProto_Type.TYPE_BYTES: + return "TYPE_BYTES"; + case FieldDescriptorProto_Type.TYPE_UINT32: + return "TYPE_UINT32"; + case FieldDescriptorProto_Type.TYPE_ENUM: + return "TYPE_ENUM"; + case FieldDescriptorProto_Type.TYPE_SFIXED32: + return "TYPE_SFIXED32"; + case FieldDescriptorProto_Type.TYPE_SFIXED64: + return "TYPE_SFIXED64"; + case FieldDescriptorProto_Type.TYPE_SINT32: + return "TYPE_SINT32"; + case FieldDescriptorProto_Type.TYPE_SINT64: + return "TYPE_SINT64"; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type"); + } +} +exports.fieldDescriptorProto_TypeToJSON = fieldDescriptorProto_TypeToJSON; +var FieldDescriptorProto_Label; +(function (FieldDescriptorProto_Label) { + /** 
LABEL_OPTIONAL - 0 is reserved for errors */ + FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_OPTIONAL"] = 1] = "LABEL_OPTIONAL"; + FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_REQUIRED"] = 2] = "LABEL_REQUIRED"; + FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_REPEATED"] = 3] = "LABEL_REPEATED"; +})(FieldDescriptorProto_Label = exports.FieldDescriptorProto_Label || (exports.FieldDescriptorProto_Label = {})); +function fieldDescriptorProto_LabelFromJSON(object) { + switch (object) { + case 1: + case "LABEL_OPTIONAL": + return FieldDescriptorProto_Label.LABEL_OPTIONAL; + case 2: + case "LABEL_REQUIRED": + return FieldDescriptorProto_Label.LABEL_REQUIRED; + case 3: + case "LABEL_REPEATED": + return FieldDescriptorProto_Label.LABEL_REPEATED; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label"); + } +} +exports.fieldDescriptorProto_LabelFromJSON = fieldDescriptorProto_LabelFromJSON; +function fieldDescriptorProto_LabelToJSON(object) { + switch (object) { + case FieldDescriptorProto_Label.LABEL_OPTIONAL: + return "LABEL_OPTIONAL"; + case FieldDescriptorProto_Label.LABEL_REQUIRED: + return "LABEL_REQUIRED"; + case FieldDescriptorProto_Label.LABEL_REPEATED: + return "LABEL_REPEATED"; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label"); + } +} +exports.fieldDescriptorProto_LabelToJSON = fieldDescriptorProto_LabelToJSON; +/** Generated classes can be optimized for speed or code size. */ +var FileOptions_OptimizeMode; +(function (FileOptions_OptimizeMode) { + /** SPEED - Generate complete code for parsing, serialization, */ + FileOptions_OptimizeMode[FileOptions_OptimizeMode["SPEED"] = 1] = "SPEED"; + /** CODE_SIZE - etc. */ + FileOptions_OptimizeMode[FileOptions_OptimizeMode["CODE_SIZE"] = 2] = "CODE_SIZE"; + /** LITE_RUNTIME - Generate code using MessageLite and the lite runtime. */ + FileOptions_OptimizeMode[FileOptions_OptimizeMode["LITE_RUNTIME"] = 3] = "LITE_RUNTIME"; +})(FileOptions_OptimizeMode = exports.FileOptions_OptimizeMode || (exports.FileOptions_OptimizeMode = {})); +function fileOptions_OptimizeModeFromJSON(object) { + switch (object) { + case 1: + case "SPEED": + return FileOptions_OptimizeMode.SPEED; + case 2: + case "CODE_SIZE": + return FileOptions_OptimizeMode.CODE_SIZE; + case 3: + case "LITE_RUNTIME": + return FileOptions_OptimizeMode.LITE_RUNTIME; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode"); + } +} +exports.fileOptions_OptimizeModeFromJSON = fileOptions_OptimizeModeFromJSON; +function fileOptions_OptimizeModeToJSON(object) { + switch (object) { + case FileOptions_OptimizeMode.SPEED: + return "SPEED"; + case FileOptions_OptimizeMode.CODE_SIZE: + return "CODE_SIZE"; + case FileOptions_OptimizeMode.LITE_RUNTIME: + return "LITE_RUNTIME"; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode"); + } +} +exports.fileOptions_OptimizeModeToJSON = fileOptions_OptimizeModeToJSON; +var FieldOptions_CType; +(function (FieldOptions_CType) { + /** STRING - Default mode. 
*/ + FieldOptions_CType[FieldOptions_CType["STRING"] = 0] = "STRING"; + FieldOptions_CType[FieldOptions_CType["CORD"] = 1] = "CORD"; + FieldOptions_CType[FieldOptions_CType["STRING_PIECE"] = 2] = "STRING_PIECE"; +})(FieldOptions_CType = exports.FieldOptions_CType || (exports.FieldOptions_CType = {})); +function fieldOptions_CTypeFromJSON(object) { + switch (object) { + case 0: + case "STRING": + return FieldOptions_CType.STRING; + case 1: + case "CORD": + return FieldOptions_CType.CORD; + case 2: + case "STRING_PIECE": + return FieldOptions_CType.STRING_PIECE; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType"); + } +} +exports.fieldOptions_CTypeFromJSON = fieldOptions_CTypeFromJSON; +function fieldOptions_CTypeToJSON(object) { + switch (object) { + case FieldOptions_CType.STRING: + return "STRING"; + case FieldOptions_CType.CORD: + return "CORD"; + case FieldOptions_CType.STRING_PIECE: + return "STRING_PIECE"; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType"); + } +} +exports.fieldOptions_CTypeToJSON = fieldOptions_CTypeToJSON; +var FieldOptions_JSType; +(function (FieldOptions_JSType) { + /** JS_NORMAL - Use the default type. */ + FieldOptions_JSType[FieldOptions_JSType["JS_NORMAL"] = 0] = "JS_NORMAL"; + /** JS_STRING - Use JavaScript strings. */ + FieldOptions_JSType[FieldOptions_JSType["JS_STRING"] = 1] = "JS_STRING"; + /** JS_NUMBER - Use JavaScript numbers. */ + FieldOptions_JSType[FieldOptions_JSType["JS_NUMBER"] = 2] = "JS_NUMBER"; +})(FieldOptions_JSType = exports.FieldOptions_JSType || (exports.FieldOptions_JSType = {})); +function fieldOptions_JSTypeFromJSON(object) { + switch (object) { + case 0: + case "JS_NORMAL": + return FieldOptions_JSType.JS_NORMAL; + case 1: + case "JS_STRING": + return FieldOptions_JSType.JS_STRING; + case 2: + case "JS_NUMBER": + return FieldOptions_JSType.JS_NUMBER; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType"); + } +} +exports.fieldOptions_JSTypeFromJSON = fieldOptions_JSTypeFromJSON; +function fieldOptions_JSTypeToJSON(object) { + switch (object) { + case FieldOptions_JSType.JS_NORMAL: + return "JS_NORMAL"; + case FieldOptions_JSType.JS_STRING: + return "JS_STRING"; + case FieldOptions_JSType.JS_NUMBER: + return "JS_NUMBER"; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType"); + } +} +exports.fieldOptions_JSTypeToJSON = fieldOptions_JSTypeToJSON; +/** + * Is this method side-effect-free (or safe in HTTP parlance), or idempotent, + * or neither? HTTP based RPC implementation may choose GET verb for safe + * methods, and PUT verb for idempotent methods instead of the default POST. 
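+ * For example (hypothetical method names): a read-only GetFoo marked
+ * NO_SIDE_EFFECTS could be served over GET, a replace-style SetFoo marked
+ * IDEMPOTENT over PUT, while IDEMPOTENCY_UNKNOWN methods keep the default POST.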
+ */ +var MethodOptions_IdempotencyLevel; +(function (MethodOptions_IdempotencyLevel) { + MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["IDEMPOTENCY_UNKNOWN"] = 0] = "IDEMPOTENCY_UNKNOWN"; + /** NO_SIDE_EFFECTS - implies idempotent */ + MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["NO_SIDE_EFFECTS"] = 1] = "NO_SIDE_EFFECTS"; + /** IDEMPOTENT - idempotent, but may have side effects */ + MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["IDEMPOTENT"] = 2] = "IDEMPOTENT"; +})(MethodOptions_IdempotencyLevel = exports.MethodOptions_IdempotencyLevel || (exports.MethodOptions_IdempotencyLevel = {})); +function methodOptions_IdempotencyLevelFromJSON(object) { + switch (object) { + case 0: + case "IDEMPOTENCY_UNKNOWN": + return MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN; + case 1: + case "NO_SIDE_EFFECTS": + return MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS; + case 2: + case "IDEMPOTENT": + return MethodOptions_IdempotencyLevel.IDEMPOTENT; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel"); + } +} +exports.methodOptions_IdempotencyLevelFromJSON = methodOptions_IdempotencyLevelFromJSON; +function methodOptions_IdempotencyLevelToJSON(object) { + switch (object) { + case MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN: + return "IDEMPOTENCY_UNKNOWN"; + case MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS: + return "NO_SIDE_EFFECTS"; + case MethodOptions_IdempotencyLevel.IDEMPOTENT: + return "IDEMPOTENT"; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel"); + } +} +exports.methodOptions_IdempotencyLevelToJSON = methodOptions_IdempotencyLevelToJSON; +function createBaseFileDescriptorSet() { + return { file: [] }; +} +exports.FileDescriptorSet = { + fromJSON(object) { + return { file: Array.isArray(object?.file) ? object.file.map((e) => exports.FileDescriptorProto.fromJSON(e)) : [] }; + }, + toJSON(message) { + const obj = {}; + if (message.file) { + obj.file = message.file.map((e) => e ? exports.FileDescriptorProto.toJSON(e) : undefined); + } + else { + obj.file = []; + } + return obj; + }, +}; +function createBaseFileDescriptorProto() { + return { + name: "", + package: "", + dependency: [], + publicDependency: [], + weakDependency: [], + messageType: [], + enumType: [], + service: [], + extension: [], + options: undefined, + sourceCodeInfo: undefined, + syntax: "", + }; +} +exports.FileDescriptorProto = { + fromJSON(object) { + return { + name: isSet(object.name) ? String(object.name) : "", + package: isSet(object.package) ? String(object.package) : "", + dependency: Array.isArray(object?.dependency) ? object.dependency.map((e) => String(e)) : [], + publicDependency: Array.isArray(object?.publicDependency) + ? object.publicDependency.map((e) => Number(e)) + : [], + weakDependency: Array.isArray(object?.weakDependency) ? object.weakDependency.map((e) => Number(e)) : [], + messageType: Array.isArray(object?.messageType) + ? object.messageType.map((e) => exports.DescriptorProto.fromJSON(e)) + : [], + enumType: Array.isArray(object?.enumType) ? object.enumType.map((e) => exports.EnumDescriptorProto.fromJSON(e)) : [], + service: Array.isArray(object?.service) ? object.service.map((e) => exports.ServiceDescriptorProto.fromJSON(e)) : [], + extension: Array.isArray(object?.extension) + ? object.extension.map((e) => exports.FieldDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) ? 
exports.FileOptions.fromJSON(object.options) : undefined, + sourceCodeInfo: isSet(object.sourceCodeInfo) ? exports.SourceCodeInfo.fromJSON(object.sourceCodeInfo) : undefined, + syntax: isSet(object.syntax) ? String(object.syntax) : "", + }; + }, + toJSON(message) { + const obj = {}; + message.name !== undefined && (obj.name = message.name); + message.package !== undefined && (obj.package = message.package); + if (message.dependency) { + obj.dependency = message.dependency.map((e) => e); + } + else { + obj.dependency = []; + } + if (message.publicDependency) { + obj.publicDependency = message.publicDependency.map((e) => Math.round(e)); + } + else { + obj.publicDependency = []; + } + if (message.weakDependency) { + obj.weakDependency = message.weakDependency.map((e) => Math.round(e)); + } + else { + obj.weakDependency = []; + } + if (message.messageType) { + obj.messageType = message.messageType.map((e) => e ? exports.DescriptorProto.toJSON(e) : undefined); + } + else { + obj.messageType = []; + } + if (message.enumType) { + obj.enumType = message.enumType.map((e) => e ? exports.EnumDescriptorProto.toJSON(e) : undefined); + } + else { + obj.enumType = []; + } + if (message.service) { + obj.service = message.service.map((e) => e ? exports.ServiceDescriptorProto.toJSON(e) : undefined); + } + else { + obj.service = []; + } + if (message.extension) { + obj.extension = message.extension.map((e) => e ? exports.FieldDescriptorProto.toJSON(e) : undefined); + } + else { + obj.extension = []; + } + message.options !== undefined && (obj.options = message.options ? exports.FileOptions.toJSON(message.options) : undefined); + message.sourceCodeInfo !== undefined && + (obj.sourceCodeInfo = message.sourceCodeInfo ? exports.SourceCodeInfo.toJSON(message.sourceCodeInfo) : undefined); + message.syntax !== undefined && (obj.syntax = message.syntax); + return obj; + }, +}; +function createBaseDescriptorProto() { + return { + name: "", + field: [], + extension: [], + nestedType: [], + enumType: [], + extensionRange: [], + oneofDecl: [], + options: undefined, + reservedRange: [], + reservedName: [], + }; +} +exports.DescriptorProto = { + fromJSON(object) { + return { + name: isSet(object.name) ? String(object.name) : "", + field: Array.isArray(object?.field) ? object.field.map((e) => exports.FieldDescriptorProto.fromJSON(e)) : [], + extension: Array.isArray(object?.extension) + ? object.extension.map((e) => exports.FieldDescriptorProto.fromJSON(e)) + : [], + nestedType: Array.isArray(object?.nestedType) + ? object.nestedType.map((e) => exports.DescriptorProto.fromJSON(e)) + : [], + enumType: Array.isArray(object?.enumType) ? object.enumType.map((e) => exports.EnumDescriptorProto.fromJSON(e)) : [], + extensionRange: Array.isArray(object?.extensionRange) + ? object.extensionRange.map((e) => exports.DescriptorProto_ExtensionRange.fromJSON(e)) + : [], + oneofDecl: Array.isArray(object?.oneofDecl) + ? object.oneofDecl.map((e) => exports.OneofDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) ? exports.MessageOptions.fromJSON(object.options) : undefined, + reservedRange: Array.isArray(object?.reservedRange) + ? object.reservedRange.map((e) => exports.DescriptorProto_ReservedRange.fromJSON(e)) + : [], + reservedName: Array.isArray(object?.reservedName) ? object.reservedName.map((e) => String(e)) : [], + }; + }, + toJSON(message) { + const obj = {}; + message.name !== undefined && (obj.name = message.name); + if (message.field) { + obj.field = message.field.map((e) => e ? 
exports.FieldDescriptorProto.toJSON(e) : undefined); + } + else { + obj.field = []; + } + if (message.extension) { + obj.extension = message.extension.map((e) => e ? exports.FieldDescriptorProto.toJSON(e) : undefined); + } + else { + obj.extension = []; + } + if (message.nestedType) { + obj.nestedType = message.nestedType.map((e) => e ? exports.DescriptorProto.toJSON(e) : undefined); + } + else { + obj.nestedType = []; + } + if (message.enumType) { + obj.enumType = message.enumType.map((e) => e ? exports.EnumDescriptorProto.toJSON(e) : undefined); + } + else { + obj.enumType = []; + } + if (message.extensionRange) { + obj.extensionRange = message.extensionRange.map((e) => e ? exports.DescriptorProto_ExtensionRange.toJSON(e) : undefined); + } + else { + obj.extensionRange = []; + } + if (message.oneofDecl) { + obj.oneofDecl = message.oneofDecl.map((e) => e ? exports.OneofDescriptorProto.toJSON(e) : undefined); + } + else { + obj.oneofDecl = []; + } + message.options !== undefined && + (obj.options = message.options ? exports.MessageOptions.toJSON(message.options) : undefined); + if (message.reservedRange) { + obj.reservedRange = message.reservedRange.map((e) => e ? exports.DescriptorProto_ReservedRange.toJSON(e) : undefined); + } + else { + obj.reservedRange = []; + } + if (message.reservedName) { + obj.reservedName = message.reservedName.map((e) => e); + } + else { + obj.reservedName = []; + } + return obj; + }, +}; +function createBaseDescriptorProto_ExtensionRange() { + return { start: 0, end: 0, options: undefined }; +} +exports.DescriptorProto_ExtensionRange = { + fromJSON(object) { + return { + start: isSet(object.start) ? Number(object.start) : 0, + end: isSet(object.end) ? Number(object.end) : 0, + options: isSet(object.options) ? exports.ExtensionRangeOptions.fromJSON(object.options) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.start !== undefined && (obj.start = Math.round(message.start)); + message.end !== undefined && (obj.end = Math.round(message.end)); + message.options !== undefined && + (obj.options = message.options ? exports.ExtensionRangeOptions.toJSON(message.options) : undefined); + return obj; + }, +}; +function createBaseDescriptorProto_ReservedRange() { + return { start: 0, end: 0 }; +} +exports.DescriptorProto_ReservedRange = { + fromJSON(object) { + return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? Number(object.end) : 0 }; + }, + toJSON(message) { + const obj = {}; + message.start !== undefined && (obj.start = Math.round(message.start)); + message.end !== undefined && (obj.end = Math.round(message.end)); + return obj; + }, +}; +function createBaseExtensionRangeOptions() { + return { uninterpretedOption: [] }; +} +exports.ExtensionRangeOptions = { + fromJSON(object) { + return { + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? 
exports.UninterpretedOption.toJSON(e) : undefined); + } + else { + obj.uninterpretedOption = []; + } + return obj; + }, +}; +function createBaseFieldDescriptorProto() { + return { + name: "", + number: 0, + label: 1, + type: 1, + typeName: "", + extendee: "", + defaultValue: "", + oneofIndex: 0, + jsonName: "", + options: undefined, + proto3Optional: false, + }; +} +exports.FieldDescriptorProto = { + fromJSON(object) { + return { + name: isSet(object.name) ? String(object.name) : "", + number: isSet(object.number) ? Number(object.number) : 0, + label: isSet(object.label) ? fieldDescriptorProto_LabelFromJSON(object.label) : 1, + type: isSet(object.type) ? fieldDescriptorProto_TypeFromJSON(object.type) : 1, + typeName: isSet(object.typeName) ? String(object.typeName) : "", + extendee: isSet(object.extendee) ? String(object.extendee) : "", + defaultValue: isSet(object.defaultValue) ? String(object.defaultValue) : "", + oneofIndex: isSet(object.oneofIndex) ? Number(object.oneofIndex) : 0, + jsonName: isSet(object.jsonName) ? String(object.jsonName) : "", + options: isSet(object.options) ? exports.FieldOptions.fromJSON(object.options) : undefined, + proto3Optional: isSet(object.proto3Optional) ? Boolean(object.proto3Optional) : false, + }; + }, + toJSON(message) { + const obj = {}; + message.name !== undefined && (obj.name = message.name); + message.number !== undefined && (obj.number = Math.round(message.number)); + message.label !== undefined && (obj.label = fieldDescriptorProto_LabelToJSON(message.label)); + message.type !== undefined && (obj.type = fieldDescriptorProto_TypeToJSON(message.type)); + message.typeName !== undefined && (obj.typeName = message.typeName); + message.extendee !== undefined && (obj.extendee = message.extendee); + message.defaultValue !== undefined && (obj.defaultValue = message.defaultValue); + message.oneofIndex !== undefined && (obj.oneofIndex = Math.round(message.oneofIndex)); + message.jsonName !== undefined && (obj.jsonName = message.jsonName); + message.options !== undefined && (obj.options = message.options ? exports.FieldOptions.toJSON(message.options) : undefined); + message.proto3Optional !== undefined && (obj.proto3Optional = message.proto3Optional); + return obj; + }, +}; +function createBaseOneofDescriptorProto() { + return { name: "", options: undefined }; +} +exports.OneofDescriptorProto = { + fromJSON(object) { + return { + name: isSet(object.name) ? String(object.name) : "", + options: isSet(object.options) ? exports.OneofOptions.fromJSON(object.options) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.name !== undefined && (obj.name = message.name); + message.options !== undefined && (obj.options = message.options ? exports.OneofOptions.toJSON(message.options) : undefined); + return obj; + }, +}; +function createBaseEnumDescriptorProto() { + return { name: "", value: [], options: undefined, reservedRange: [], reservedName: [] }; +} +exports.EnumDescriptorProto = { + fromJSON(object) { + return { + name: isSet(object.name) ? String(object.name) : "", + value: Array.isArray(object?.value) ? object.value.map((e) => exports.EnumValueDescriptorProto.fromJSON(e)) : [], + options: isSet(object.options) ? exports.EnumOptions.fromJSON(object.options) : undefined, + reservedRange: Array.isArray(object?.reservedRange) + ? object.reservedRange.map((e) => exports.EnumDescriptorProto_EnumReservedRange.fromJSON(e)) + : [], + reservedName: Array.isArray(object?.reservedName) + ? 
object.reservedName.map((e) => String(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + message.name !== undefined && (obj.name = message.name); + if (message.value) { + obj.value = message.value.map((e) => e ? exports.EnumValueDescriptorProto.toJSON(e) : undefined); + } + else { + obj.value = []; + } + message.options !== undefined && (obj.options = message.options ? exports.EnumOptions.toJSON(message.options) : undefined); + if (message.reservedRange) { + obj.reservedRange = message.reservedRange.map((e) => e ? exports.EnumDescriptorProto_EnumReservedRange.toJSON(e) : undefined); + } + else { + obj.reservedRange = []; + } + if (message.reservedName) { + obj.reservedName = message.reservedName.map((e) => e); + } + else { + obj.reservedName = []; + } + return obj; + }, +}; +function createBaseEnumDescriptorProto_EnumReservedRange() { + return { start: 0, end: 0 }; +} +exports.EnumDescriptorProto_EnumReservedRange = { + fromJSON(object) { + return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? Number(object.end) : 0 }; + }, + toJSON(message) { + const obj = {}; + message.start !== undefined && (obj.start = Math.round(message.start)); + message.end !== undefined && (obj.end = Math.round(message.end)); + return obj; + }, +}; +function createBaseEnumValueDescriptorProto() { + return { name: "", number: 0, options: undefined }; +} +exports.EnumValueDescriptorProto = { + fromJSON(object) { + return { + name: isSet(object.name) ? String(object.name) : "", + number: isSet(object.number) ? Number(object.number) : 0, + options: isSet(object.options) ? exports.EnumValueOptions.fromJSON(object.options) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.name !== undefined && (obj.name = message.name); + message.number !== undefined && (obj.number = Math.round(message.number)); + message.options !== undefined && + (obj.options = message.options ? exports.EnumValueOptions.toJSON(message.options) : undefined); + return obj; + }, +}; +function createBaseServiceDescriptorProto() { + return { name: "", method: [], options: undefined }; +} +exports.ServiceDescriptorProto = { + fromJSON(object) { + return { + name: isSet(object.name) ? String(object.name) : "", + method: Array.isArray(object?.method) ? object.method.map((e) => exports.MethodDescriptorProto.fromJSON(e)) : [], + options: isSet(object.options) ? exports.ServiceOptions.fromJSON(object.options) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.name !== undefined && (obj.name = message.name); + if (message.method) { + obj.method = message.method.map((e) => e ? exports.MethodDescriptorProto.toJSON(e) : undefined); + } + else { + obj.method = []; + } + message.options !== undefined && + (obj.options = message.options ? exports.ServiceOptions.toJSON(message.options) : undefined); + return obj; + }, +}; +function createBaseMethodDescriptorProto() { + return { + name: "", + inputType: "", + outputType: "", + options: undefined, + clientStreaming: false, + serverStreaming: false, + }; +} +exports.MethodDescriptorProto = { + fromJSON(object) { + return { + name: isSet(object.name) ? String(object.name) : "", + inputType: isSet(object.inputType) ? String(object.inputType) : "", + outputType: isSet(object.outputType) ? String(object.outputType) : "", + options: isSet(object.options) ? exports.MethodOptions.fromJSON(object.options) : undefined, + clientStreaming: isSet(object.clientStreaming) ? 
Boolean(object.clientStreaming) : false, + serverStreaming: isSet(object.serverStreaming) ? Boolean(object.serverStreaming) : false, + }; + }, + toJSON(message) { + const obj = {}; + message.name !== undefined && (obj.name = message.name); + message.inputType !== undefined && (obj.inputType = message.inputType); + message.outputType !== undefined && (obj.outputType = message.outputType); + message.options !== undefined && + (obj.options = message.options ? exports.MethodOptions.toJSON(message.options) : undefined); + message.clientStreaming !== undefined && (obj.clientStreaming = message.clientStreaming); + message.serverStreaming !== undefined && (obj.serverStreaming = message.serverStreaming); + return obj; + }, +}; +function createBaseFileOptions() { + return { + javaPackage: "", + javaOuterClassname: "", + javaMultipleFiles: false, + javaGenerateEqualsAndHash: false, + javaStringCheckUtf8: false, + optimizeFor: 1, + goPackage: "", + ccGenericServices: false, + javaGenericServices: false, + pyGenericServices: false, + phpGenericServices: false, + deprecated: false, + ccEnableArenas: false, + objcClassPrefix: "", + csharpNamespace: "", + swiftPrefix: "", + phpClassPrefix: "", + phpNamespace: "", + phpMetadataNamespace: "", + rubyPackage: "", + uninterpretedOption: [], + }; +} +exports.FileOptions = { + fromJSON(object) { + return { + javaPackage: isSet(object.javaPackage) ? String(object.javaPackage) : "", + javaOuterClassname: isSet(object.javaOuterClassname) ? String(object.javaOuterClassname) : "", + javaMultipleFiles: isSet(object.javaMultipleFiles) ? Boolean(object.javaMultipleFiles) : false, + javaGenerateEqualsAndHash: isSet(object.javaGenerateEqualsAndHash) + ? Boolean(object.javaGenerateEqualsAndHash) + : false, + javaStringCheckUtf8: isSet(object.javaStringCheckUtf8) ? Boolean(object.javaStringCheckUtf8) : false, + optimizeFor: isSet(object.optimizeFor) ? fileOptions_OptimizeModeFromJSON(object.optimizeFor) : 1, + goPackage: isSet(object.goPackage) ? String(object.goPackage) : "", + ccGenericServices: isSet(object.ccGenericServices) ? Boolean(object.ccGenericServices) : false, + javaGenericServices: isSet(object.javaGenericServices) ? Boolean(object.javaGenericServices) : false, + pyGenericServices: isSet(object.pyGenericServices) ? Boolean(object.pyGenericServices) : false, + phpGenericServices: isSet(object.phpGenericServices) ? Boolean(object.phpGenericServices) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + ccEnableArenas: isSet(object.ccEnableArenas) ? Boolean(object.ccEnableArenas) : false, + objcClassPrefix: isSet(object.objcClassPrefix) ? String(object.objcClassPrefix) : "", + csharpNamespace: isSet(object.csharpNamespace) ? String(object.csharpNamespace) : "", + swiftPrefix: isSet(object.swiftPrefix) ? String(object.swiftPrefix) : "", + phpClassPrefix: isSet(object.phpClassPrefix) ? String(object.phpClassPrefix) : "", + phpNamespace: isSet(object.phpNamespace) ? String(object.phpNamespace) : "", + phpMetadataNamespace: isSet(object.phpMetadataNamespace) ? String(object.phpMetadataNamespace) : "", + rubyPackage: isSet(object.rubyPackage) ? String(object.rubyPackage) : "", + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + message.javaPackage !== undefined && (obj.javaPackage = message.javaPackage); + message.javaOuterClassname !== undefined && (obj.javaOuterClassname = message.javaOuterClassname); + message.javaMultipleFiles !== undefined && (obj.javaMultipleFiles = message.javaMultipleFiles); + message.javaGenerateEqualsAndHash !== undefined && + (obj.javaGenerateEqualsAndHash = message.javaGenerateEqualsAndHash); + message.javaStringCheckUtf8 !== undefined && (obj.javaStringCheckUtf8 = message.javaStringCheckUtf8); + message.optimizeFor !== undefined && (obj.optimizeFor = fileOptions_OptimizeModeToJSON(message.optimizeFor)); + message.goPackage !== undefined && (obj.goPackage = message.goPackage); + message.ccGenericServices !== undefined && (obj.ccGenericServices = message.ccGenericServices); + message.javaGenericServices !== undefined && (obj.javaGenericServices = message.javaGenericServices); + message.pyGenericServices !== undefined && (obj.pyGenericServices = message.pyGenericServices); + message.phpGenericServices !== undefined && (obj.phpGenericServices = message.phpGenericServices); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.ccEnableArenas !== undefined && (obj.ccEnableArenas = message.ccEnableArenas); + message.objcClassPrefix !== undefined && (obj.objcClassPrefix = message.objcClassPrefix); + message.csharpNamespace !== undefined && (obj.csharpNamespace = message.csharpNamespace); + message.swiftPrefix !== undefined && (obj.swiftPrefix = message.swiftPrefix); + message.phpClassPrefix !== undefined && (obj.phpClassPrefix = message.phpClassPrefix); + message.phpNamespace !== undefined && (obj.phpNamespace = message.phpNamespace); + message.phpMetadataNamespace !== undefined && (obj.phpMetadataNamespace = message.phpMetadataNamespace); + message.rubyPackage !== undefined && (obj.rubyPackage = message.rubyPackage); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined); + } + else { + obj.uninterpretedOption = []; + } + return obj; + }, +}; +function createBaseMessageOptions() { + return { + messageSetWireFormat: false, + noStandardDescriptorAccessor: false, + deprecated: false, + mapEntry: false, + uninterpretedOption: [], + }; +} +exports.MessageOptions = { + fromJSON(object) { + return { + messageSetWireFormat: isSet(object.messageSetWireFormat) ? Boolean(object.messageSetWireFormat) : false, + noStandardDescriptorAccessor: isSet(object.noStandardDescriptorAccessor) + ? Boolean(object.noStandardDescriptorAccessor) + : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + mapEntry: isSet(object.mapEntry) ? Boolean(object.mapEntry) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + message.messageSetWireFormat !== undefined && (obj.messageSetWireFormat = message.messageSetWireFormat); + message.noStandardDescriptorAccessor !== undefined && + (obj.noStandardDescriptorAccessor = message.noStandardDescriptorAccessor); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.mapEntry !== undefined && (obj.mapEntry = message.mapEntry); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined); + } + else { + obj.uninterpretedOption = []; + } + return obj; + }, +}; +function createBaseFieldOptions() { + return { + ctype: 0, + packed: false, + jstype: 0, + lazy: false, + unverifiedLazy: false, + deprecated: false, + weak: false, + uninterpretedOption: [], + }; +} +exports.FieldOptions = { + fromJSON(object) { + return { + ctype: isSet(object.ctype) ? fieldOptions_CTypeFromJSON(object.ctype) : 0, + packed: isSet(object.packed) ? Boolean(object.packed) : false, + jstype: isSet(object.jstype) ? fieldOptions_JSTypeFromJSON(object.jstype) : 0, + lazy: isSet(object.lazy) ? Boolean(object.lazy) : false, + unverifiedLazy: isSet(object.unverifiedLazy) ? Boolean(object.unverifiedLazy) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + weak: isSet(object.weak) ? Boolean(object.weak) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + message.ctype !== undefined && (obj.ctype = fieldOptions_CTypeToJSON(message.ctype)); + message.packed !== undefined && (obj.packed = message.packed); + message.jstype !== undefined && (obj.jstype = fieldOptions_JSTypeToJSON(message.jstype)); + message.lazy !== undefined && (obj.lazy = message.lazy); + message.unverifiedLazy !== undefined && (obj.unverifiedLazy = message.unverifiedLazy); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.weak !== undefined && (obj.weak = message.weak); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined); + } + else { + obj.uninterpretedOption = []; + } + return obj; + }, +}; +function createBaseOneofOptions() { + return { uninterpretedOption: [] }; +} +exports.OneofOptions = { + fromJSON(object) { + return { + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined); + } + else { + obj.uninterpretedOption = []; + } + return obj; + }, +}; +function createBaseEnumOptions() { + return { allowAlias: false, deprecated: false, uninterpretedOption: [] }; +} +exports.EnumOptions = { + fromJSON(object) { + return { + allowAlias: isSet(object.allowAlias) ? Boolean(object.allowAlias) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + message.allowAlias !== undefined && (obj.allowAlias = message.allowAlias); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined); + } + else { + obj.uninterpretedOption = []; + } + return obj; + }, +}; +function createBaseEnumValueOptions() { + return { deprecated: false, uninterpretedOption: [] }; +} +exports.EnumValueOptions = { + fromJSON(object) { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined); + } + else { + obj.uninterpretedOption = []; + } + return obj; + }, +}; +function createBaseServiceOptions() { + return { deprecated: false, uninterpretedOption: [] }; +} +exports.ServiceOptions = { + fromJSON(object) { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined); + } + else { + obj.uninterpretedOption = []; + } + return obj; + }, +}; +function createBaseMethodOptions() { + return { deprecated: false, idempotencyLevel: 0, uninterpretedOption: [] }; +} +exports.MethodOptions = { + fromJSON(object) { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + idempotencyLevel: isSet(object.idempotencyLevel) + ? methodOptions_IdempotencyLevelFromJSON(object.idempotencyLevel) + : 0, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.idempotencyLevel !== undefined && + (obj.idempotencyLevel = methodOptions_IdempotencyLevelToJSON(message.idempotencyLevel)); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined); + } + else { + obj.uninterpretedOption = []; + } + return obj; + }, +}; +function createBaseUninterpretedOption() { + return { + name: [], + identifierValue: "", + positiveIntValue: "0", + negativeIntValue: "0", + doubleValue: 0, + stringValue: Buffer.alloc(0), + aggregateValue: "", + }; +} +exports.UninterpretedOption = { + fromJSON(object) { + return { + name: Array.isArray(object?.name) ? object.name.map((e) => exports.UninterpretedOption_NamePart.fromJSON(e)) : [], + identifierValue: isSet(object.identifierValue) ? 
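+ // Note (illustrative): the 64-bit integer fields below are kept as decimal
+ // strings (default "0"), so values past Number.MAX_SAFE_INTEGER (2^53 - 1)
+ // survive the JSON round-trip losslessly. A minimal sketch:
+ //
+ //   exports.UninterpretedOption.fromJSON({ positiveIntValue: "9007199254740993" })
+ //     .positiveIntValue; // => "9007199254740993" (string, not a lossy Number)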
String(object.identifierValue) : "", + positiveIntValue: isSet(object.positiveIntValue) ? String(object.positiveIntValue) : "0", + negativeIntValue: isSet(object.negativeIntValue) ? String(object.negativeIntValue) : "0", + doubleValue: isSet(object.doubleValue) ? Number(object.doubleValue) : 0, + stringValue: isSet(object.stringValue) ? Buffer.from(bytesFromBase64(object.stringValue)) : Buffer.alloc(0), + aggregateValue: isSet(object.aggregateValue) ? String(object.aggregateValue) : "", + }; + }, + toJSON(message) { + const obj = {}; + if (message.name) { + obj.name = message.name.map((e) => e ? exports.UninterpretedOption_NamePart.toJSON(e) : undefined); + } + else { + obj.name = []; + } + message.identifierValue !== undefined && (obj.identifierValue = message.identifierValue); + message.positiveIntValue !== undefined && (obj.positiveIntValue = message.positiveIntValue); + message.negativeIntValue !== undefined && (obj.negativeIntValue = message.negativeIntValue); + message.doubleValue !== undefined && (obj.doubleValue = message.doubleValue); + message.stringValue !== undefined && + (obj.stringValue = base64FromBytes(message.stringValue !== undefined ? message.stringValue : Buffer.alloc(0))); + message.aggregateValue !== undefined && (obj.aggregateValue = message.aggregateValue); + return obj; + }, +}; +function createBaseUninterpretedOption_NamePart() { + return { namePart: "", isExtension: false }; +} +exports.UninterpretedOption_NamePart = { + fromJSON(object) { + return { + namePart: isSet(object.namePart) ? String(object.namePart) : "", + isExtension: isSet(object.isExtension) ? Boolean(object.isExtension) : false, + }; + }, + toJSON(message) { + const obj = {}; + message.namePart !== undefined && (obj.namePart = message.namePart); + message.isExtension !== undefined && (obj.isExtension = message.isExtension); + return obj; + }, +}; +function createBaseSourceCodeInfo() { + return { location: [] }; +} +exports.SourceCodeInfo = { + fromJSON(object) { + return { + location: Array.isArray(object?.location) + ? object.location.map((e) => exports.SourceCodeInfo_Location.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.location) { + obj.location = message.location.map((e) => e ? exports.SourceCodeInfo_Location.toJSON(e) : undefined); + } + else { + obj.location = []; + } + return obj; + }, +}; +function createBaseSourceCodeInfo_Location() { + return { path: [], span: [], leadingComments: "", trailingComments: "", leadingDetachedComments: [] }; +} +exports.SourceCodeInfo_Location = { + fromJSON(object) { + return { + path: Array.isArray(object?.path) ? object.path.map((e) => Number(e)) : [], + span: Array.isArray(object?.span) ? object.span.map((e) => Number(e)) : [], + leadingComments: isSet(object.leadingComments) ? String(object.leadingComments) : "", + trailingComments: isSet(object.trailingComments) ? String(object.trailingComments) : "", + leadingDetachedComments: Array.isArray(object?.leadingDetachedComments) + ? 
object.leadingDetachedComments.map((e) => String(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.path) { + obj.path = message.path.map((e) => Math.round(e)); + } + else { + obj.path = []; + } + if (message.span) { + obj.span = message.span.map((e) => Math.round(e)); + } + else { + obj.span = []; + } + message.leadingComments !== undefined && (obj.leadingComments = message.leadingComments); + message.trailingComments !== undefined && (obj.trailingComments = message.trailingComments); + if (message.leadingDetachedComments) { + obj.leadingDetachedComments = message.leadingDetachedComments.map((e) => e); + } + else { + obj.leadingDetachedComments = []; + } + return obj; + }, +}; +function createBaseGeneratedCodeInfo() { + return { annotation: [] }; +} +exports.GeneratedCodeInfo = { + fromJSON(object) { + return { + annotation: Array.isArray(object?.annotation) + ? object.annotation.map((e) => exports.GeneratedCodeInfo_Annotation.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.annotation) { + obj.annotation = message.annotation.map((e) => e ? exports.GeneratedCodeInfo_Annotation.toJSON(e) : undefined); + } + else { + obj.annotation = []; + } + return obj; + }, +}; +function createBaseGeneratedCodeInfo_Annotation() { + return { path: [], sourceFile: "", begin: 0, end: 0 }; +} +exports.GeneratedCodeInfo_Annotation = { + fromJSON(object) { + return { + path: Array.isArray(object?.path) ? object.path.map((e) => Number(e)) : [], + sourceFile: isSet(object.sourceFile) ? String(object.sourceFile) : "", + begin: isSet(object.begin) ? Number(object.begin) : 0, + end: isSet(object.end) ? Number(object.end) : 0, + }; + }, + toJSON(message) { + const obj = {}; + if (message.path) { + obj.path = message.path.map((e) => Math.round(e)); + } + else { + obj.path = []; + } + message.sourceFile !== undefined && (obj.sourceFile = message.sourceFile); + message.begin !== undefined && (obj.begin = Math.round(message.begin)); + message.end !== undefined && (obj.end = Math.round(message.end)); + return obj; + }, +}; +var tsProtoGlobalThis = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); +function bytesFromBase64(b64) { + if (tsProtoGlobalThis.Buffer) { + return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64")); + } + else { + const bin = tsProtoGlobalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} +function base64FromBytes(arr) { + if (tsProtoGlobalThis.Buffer) { + return tsProtoGlobalThis.Buffer.from(arr).toString("base64"); + } + else { + const bin = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return tsProtoGlobalThis.btoa(bin.join("")); + } +} +function isSet(value) { + return value !== null && value !== undefined; +} diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js new file mode 100644 index 0000000000000..159135fe87172 --- /dev/null +++ 
b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js @@ -0,0 +1,24 @@ +"use strict"; +/* eslint-disable */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Timestamp = void 0; +function createBaseTimestamp() { + return { seconds: "0", nanos: 0 }; +} +exports.Timestamp = { + fromJSON(object) { + return { + seconds: isSet(object.seconds) ? String(object.seconds) : "0", + nanos: isSet(object.nanos) ? Number(object.nanos) : 0, + }; + }, + toJSON(message) { + const obj = {}; + message.seconds !== undefined && (obj.seconds = message.seconds); + message.nanos !== undefined && (obj.nanos = Math.round(message.nanos)); + return obj; + }, +}; +function isSet(value) { + return value !== null && value !== undefined; +} diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js new file mode 100644 index 0000000000000..1ef3e1b3356b7 --- /dev/null +++ b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js @@ -0,0 +1,106 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Bundle = exports.VerificationMaterial = exports.TimestampVerificationData = void 0; +/* eslint-disable */ +const envelope_1 = require("./envelope"); +const sigstore_common_1 = require("./sigstore_common"); +const sigstore_rekor_1 = require("./sigstore_rekor"); +function createBaseTimestampVerificationData() { + return { rfc3161Timestamps: [] }; +} +exports.TimestampVerificationData = { + fromJSON(object) { + return { + rfc3161Timestamps: Array.isArray(object?.rfc3161Timestamps) + ? object.rfc3161Timestamps.map((e) => sigstore_common_1.RFC3161SignedTimestamp.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.rfc3161Timestamps) { + obj.rfc3161Timestamps = message.rfc3161Timestamps.map((e) => e ? sigstore_common_1.RFC3161SignedTimestamp.toJSON(e) : undefined); + } + else { + obj.rfc3161Timestamps = []; + } + return obj; + }, +}; +function createBaseVerificationMaterial() { + return { content: undefined, tlogEntries: [], timestampVerificationData: undefined }; +} +exports.VerificationMaterial = { + fromJSON(object) { + return { + content: isSet(object.publicKey) + ? { $case: "publicKey", publicKey: sigstore_common_1.PublicKeyIdentifier.fromJSON(object.publicKey) } + : isSet(object.x509CertificateChain) + ? { + $case: "x509CertificateChain", + x509CertificateChain: sigstore_common_1.X509CertificateChain.fromJSON(object.x509CertificateChain), + } + : undefined, + tlogEntries: Array.isArray(object?.tlogEntries) + ? object.tlogEntries.map((e) => sigstore_rekor_1.TransparencyLogEntry.fromJSON(e)) + : [], + timestampVerificationData: isSet(object.timestampVerificationData) + ? exports.TimestampVerificationData.fromJSON(object.timestampVerificationData) + : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.content?.$case === "publicKey" && + (obj.publicKey = message.content?.publicKey ? sigstore_common_1.PublicKeyIdentifier.toJSON(message.content?.publicKey) : undefined); + message.content?.$case === "x509CertificateChain" && + (obj.x509CertificateChain = message.content?.x509CertificateChain + ? 
sigstore_common_1.X509CertificateChain.toJSON(message.content?.x509CertificateChain) + : undefined); + if (message.tlogEntries) { + obj.tlogEntries = message.tlogEntries.map((e) => e ? sigstore_rekor_1.TransparencyLogEntry.toJSON(e) : undefined); + } + else { + obj.tlogEntries = []; + } + message.timestampVerificationData !== undefined && + (obj.timestampVerificationData = message.timestampVerificationData + ? exports.TimestampVerificationData.toJSON(message.timestampVerificationData) + : undefined); + return obj; + }, +}; +function createBaseBundle() { + return { mediaType: "", verificationMaterial: undefined, content: undefined }; +} +exports.Bundle = { + fromJSON(object) { + return { + mediaType: isSet(object.mediaType) ? String(object.mediaType) : "", + verificationMaterial: isSet(object.verificationMaterial) + ? exports.VerificationMaterial.fromJSON(object.verificationMaterial) + : undefined, + content: isSet(object.messageSignature) + ? { $case: "messageSignature", messageSignature: sigstore_common_1.MessageSignature.fromJSON(object.messageSignature) } + : isSet(object.dsseEnvelope) + ? { $case: "dsseEnvelope", dsseEnvelope: envelope_1.Envelope.fromJSON(object.dsseEnvelope) } + : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.mediaType !== undefined && (obj.mediaType = message.mediaType); + message.verificationMaterial !== undefined && (obj.verificationMaterial = message.verificationMaterial + ? exports.VerificationMaterial.toJSON(message.verificationMaterial) + : undefined); + message.content?.$case === "messageSignature" && (obj.messageSignature = message.content?.messageSignature + ? sigstore_common_1.MessageSignature.toJSON(message.content?.messageSignature) + : undefined); + message.content?.$case === "dsseEnvelope" && + (obj.dsseEnvelope = message.content?.dsseEnvelope ? envelope_1.Envelope.toJSON(message.content?.dsseEnvelope) : undefined); + return obj; + }, +}; +function isSet(value) { + return value !== null && value !== undefined; +} diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js new file mode 100644 index 0000000000000..bcd654e9154b9 --- /dev/null +++ b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js @@ -0,0 +1,457 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TimeRange = exports.X509CertificateChain = exports.SubjectAlternativeName = exports.X509Certificate = exports.DistinguishedName = exports.ObjectIdentifierValuePair = exports.ObjectIdentifier = exports.PublicKeyIdentifier = exports.PublicKey = exports.RFC3161SignedTimestamp = exports.LogId = exports.MessageSignature = exports.HashOutput = exports.subjectAlternativeNameTypeToJSON = exports.subjectAlternativeNameTypeFromJSON = exports.SubjectAlternativeNameType = exports.publicKeyDetailsToJSON = exports.publicKeyDetailsFromJSON = exports.PublicKeyDetails = exports.hashAlgorithmToJSON = exports.hashAlgorithmFromJSON = exports.HashAlgorithm = void 0; +/* eslint-disable */ +const timestamp_1 = require("./google/protobuf/timestamp"); +/** + * Only a subset of the secure hash standard algorithms are supported. + * See <https://www.rfc-editor.org/rfc/rfc6234.html> for more + * details.
+ * UNSPECIFIED SHOULD not be used, primary reason for inclusion is to force + * any proto JSON serialization to emit the used hash algorithm, as default + * option is to *omit* the default value of an enum (which is the first + * value, represented by '0'). + */ +var HashAlgorithm; +(function (HashAlgorithm) { + HashAlgorithm[HashAlgorithm["HASH_ALGORITHM_UNSPECIFIED"] = 0] = "HASH_ALGORITHM_UNSPECIFIED"; + HashAlgorithm[HashAlgorithm["SHA2_256"] = 1] = "SHA2_256"; +})(HashAlgorithm = exports.HashAlgorithm || (exports.HashAlgorithm = {})); +function hashAlgorithmFromJSON(object) { + switch (object) { + case 0: + case "HASH_ALGORITHM_UNSPECIFIED": + return HashAlgorithm.HASH_ALGORITHM_UNSPECIFIED; + case 1: + case "SHA2_256": + return HashAlgorithm.SHA2_256; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm"); + } +} +exports.hashAlgorithmFromJSON = hashAlgorithmFromJSON; +function hashAlgorithmToJSON(object) { + switch (object) { + case HashAlgorithm.HASH_ALGORITHM_UNSPECIFIED: + return "HASH_ALGORITHM_UNSPECIFIED"; + case HashAlgorithm.SHA2_256: + return "SHA2_256"; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm"); + } +} +exports.hashAlgorithmToJSON = hashAlgorithmToJSON; +/** + * Details of a specific public key, capturing the key encoding method, + * and signature algorithm. + * To avoid the possibility of contradicting formats such as PKCS1 with + * ED25519 the valid permutations are listed as a linear set instead of a + * cartesian set (i.e. one combined variable instead of two, one for encoding + * and one for the signature algorithm). + */ +var PublicKeyDetails; +(function (PublicKeyDetails) { + PublicKeyDetails[PublicKeyDetails["PUBLIC_KEY_DETAILS_UNSPECIFIED"] = 0] = "PUBLIC_KEY_DETAILS_UNSPECIFIED"; + /** PKCS1_RSA_PKCS1V5 - RSA */ + PublicKeyDetails[PublicKeyDetails["PKCS1_RSA_PKCS1V5"] = 1] = "PKCS1_RSA_PKCS1V5"; + /** PKCS1_RSA_PSS - See RFC8017 */ + PublicKeyDetails[PublicKeyDetails["PKCS1_RSA_PSS"] = 2] = "PKCS1_RSA_PSS"; + PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PKCS1V5"] = 3] = "PKIX_RSA_PKCS1V5"; + PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PSS"] = 4] = "PKIX_RSA_PSS"; + /** PKIX_ECDSA_P256_SHA_256 - ECDSA */ + PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P256_SHA_256"] = 5] = "PKIX_ECDSA_P256_SHA_256"; + /** PKIX_ECDSA_P256_HMAC_SHA_256 - See RFC6979 */ + PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P256_HMAC_SHA_256"] = 6] = "PKIX_ECDSA_P256_HMAC_SHA_256"; + /** PKIX_ED25519 - Ed 25519 */ + PublicKeyDetails[PublicKeyDetails["PKIX_ED25519"] = 7] = "PKIX_ED25519"; +})(PublicKeyDetails = exports.PublicKeyDetails || (exports.PublicKeyDetails = {})); +function publicKeyDetailsFromJSON(object) { + switch (object) { + case 0: + case "PUBLIC_KEY_DETAILS_UNSPECIFIED": + return PublicKeyDetails.PUBLIC_KEY_DETAILS_UNSPECIFIED; + case 1: + case "PKCS1_RSA_PKCS1V5": + return PublicKeyDetails.PKCS1_RSA_PKCS1V5; + case 2: + case "PKCS1_RSA_PSS": + return PublicKeyDetails.PKCS1_RSA_PSS; + case 3: + case "PKIX_RSA_PKCS1V5": + return PublicKeyDetails.PKIX_RSA_PKCS1V5; + case 4: + case "PKIX_RSA_PSS": + return PublicKeyDetails.PKIX_RSA_PSS; + case 5: + case "PKIX_ECDSA_P256_SHA_256": + return PublicKeyDetails.PKIX_ECDSA_P256_SHA_256; + case 6: + case "PKIX_ECDSA_P256_HMAC_SHA_256": + return PublicKeyDetails.PKIX_ECDSA_P256_HMAC_SHA_256; + case 7: + case "PKIX_ED25519": + return PublicKeyDetails.PKIX_ED25519; + default: + throw new 
tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails"); + } +} +exports.publicKeyDetailsFromJSON = publicKeyDetailsFromJSON; +function publicKeyDetailsToJSON(object) { + switch (object) { + case PublicKeyDetails.PUBLIC_KEY_DETAILS_UNSPECIFIED: + return "PUBLIC_KEY_DETAILS_UNSPECIFIED"; + case PublicKeyDetails.PKCS1_RSA_PKCS1V5: + return "PKCS1_RSA_PKCS1V5"; + case PublicKeyDetails.PKCS1_RSA_PSS: + return "PKCS1_RSA_PSS"; + case PublicKeyDetails.PKIX_RSA_PKCS1V5: + return "PKIX_RSA_PKCS1V5"; + case PublicKeyDetails.PKIX_RSA_PSS: + return "PKIX_RSA_PSS"; + case PublicKeyDetails.PKIX_ECDSA_P256_SHA_256: + return "PKIX_ECDSA_P256_SHA_256"; + case PublicKeyDetails.PKIX_ECDSA_P256_HMAC_SHA_256: + return "PKIX_ECDSA_P256_HMAC_SHA_256"; + case PublicKeyDetails.PKIX_ED25519: + return "PKIX_ED25519"; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails"); + } +} +exports.publicKeyDetailsToJSON = publicKeyDetailsToJSON; +var SubjectAlternativeNameType; +(function (SubjectAlternativeNameType) { + SubjectAlternativeNameType[SubjectAlternativeNameType["SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED"] = 0] = "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED"; + SubjectAlternativeNameType[SubjectAlternativeNameType["EMAIL"] = 1] = "EMAIL"; + SubjectAlternativeNameType[SubjectAlternativeNameType["URI"] = 2] = "URI"; + /** + * OTHER_NAME - OID 1.3.6.1.4.1.57264.1.7 + * See https://github.com/sigstore/fulcio/blob/main/docs/oid-info.md#1361415726417--othername-san + * for more details. + */ + SubjectAlternativeNameType[SubjectAlternativeNameType["OTHER_NAME"] = 3] = "OTHER_NAME"; +})(SubjectAlternativeNameType = exports.SubjectAlternativeNameType || (exports.SubjectAlternativeNameType = {})); +function subjectAlternativeNameTypeFromJSON(object) { + switch (object) { + case 0: + case "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED": + return SubjectAlternativeNameType.SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED; + case 1: + case "EMAIL": + return SubjectAlternativeNameType.EMAIL; + case 2: + case "URI": + return SubjectAlternativeNameType.URI; + case 3: + case "OTHER_NAME": + return SubjectAlternativeNameType.OTHER_NAME; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType"); + } +} +exports.subjectAlternativeNameTypeFromJSON = subjectAlternativeNameTypeFromJSON; +function subjectAlternativeNameTypeToJSON(object) { + switch (object) { + case SubjectAlternativeNameType.SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED: + return "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED"; + case SubjectAlternativeNameType.EMAIL: + return "EMAIL"; + case SubjectAlternativeNameType.URI: + return "URI"; + case SubjectAlternativeNameType.OTHER_NAME: + return "OTHER_NAME"; + default: + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType"); + } +} +exports.subjectAlternativeNameTypeToJSON = subjectAlternativeNameTypeToJSON; +function createBaseHashOutput() { + return { algorithm: 0, digest: Buffer.alloc(0) }; +} +exports.HashOutput = { + fromJSON(object) { + return { + algorithm: isSet(object.algorithm) ? hashAlgorithmFromJSON(object.algorithm) : 0, + digest: isSet(object.digest) ? 
Buffer.from(bytesFromBase64(object.digest)) : Buffer.alloc(0), + }; + }, + toJSON(message) { + const obj = {}; + message.algorithm !== undefined && (obj.algorithm = hashAlgorithmToJSON(message.algorithm)); + message.digest !== undefined && + (obj.digest = base64FromBytes(message.digest !== undefined ? message.digest : Buffer.alloc(0))); + return obj; + }, +}; +function createBaseMessageSignature() { + return { messageDigest: undefined, signature: Buffer.alloc(0) }; +} +exports.MessageSignature = { + fromJSON(object) { + return { + messageDigest: isSet(object.messageDigest) ? exports.HashOutput.fromJSON(object.messageDigest) : undefined, + signature: isSet(object.signature) ? Buffer.from(bytesFromBase64(object.signature)) : Buffer.alloc(0), + }; + }, + toJSON(message) { + const obj = {}; + message.messageDigest !== undefined && + (obj.messageDigest = message.messageDigest ? exports.HashOutput.toJSON(message.messageDigest) : undefined); + message.signature !== undefined && + (obj.signature = base64FromBytes(message.signature !== undefined ? message.signature : Buffer.alloc(0))); + return obj; + }, +}; +function createBaseLogId() { + return { keyId: Buffer.alloc(0) }; +} +exports.LogId = { + fromJSON(object) { + return { keyId: isSet(object.keyId) ? Buffer.from(bytesFromBase64(object.keyId)) : Buffer.alloc(0) }; + }, + toJSON(message) { + const obj = {}; + message.keyId !== undefined && + (obj.keyId = base64FromBytes(message.keyId !== undefined ? message.keyId : Buffer.alloc(0))); + return obj; + }, +}; +function createBaseRFC3161SignedTimestamp() { + return { signedTimestamp: Buffer.alloc(0) }; +} +exports.RFC3161SignedTimestamp = { + fromJSON(object) { + return { + signedTimestamp: isSet(object.signedTimestamp) + ? Buffer.from(bytesFromBase64(object.signedTimestamp)) + : Buffer.alloc(0), + }; + }, + toJSON(message) { + const obj = {}; + message.signedTimestamp !== undefined && + (obj.signedTimestamp = base64FromBytes(message.signedTimestamp !== undefined ? message.signedTimestamp : Buffer.alloc(0))); + return obj; + }, +}; +function createBasePublicKey() { + return { rawBytes: undefined, keyDetails: 0, validFor: undefined }; +} +exports.PublicKey = { + fromJSON(object) { + return { + rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : undefined, + keyDetails: isSet(object.keyDetails) ? publicKeyDetailsFromJSON(object.keyDetails) : 0, + validFor: isSet(object.validFor) ? exports.TimeRange.fromJSON(object.validFor) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.rawBytes !== undefined && + (obj.rawBytes = message.rawBytes !== undefined ? base64FromBytes(message.rawBytes) : undefined); + message.keyDetails !== undefined && (obj.keyDetails = publicKeyDetailsToJSON(message.keyDetails)); + message.validFor !== undefined && + (obj.validFor = message.validFor ? exports.TimeRange.toJSON(message.validFor) : undefined); + return obj; + }, +}; +function createBasePublicKeyIdentifier() { + return { hint: "" }; +} +exports.PublicKeyIdentifier = { + fromJSON(object) { + return { hint: isSet(object.hint) ? String(object.hint) : "" }; + }, + toJSON(message) { + const obj = {}; + message.hint !== undefined && (obj.hint = message.hint); + return obj; + }, +}; +function createBaseObjectIdentifier() { + return { id: [] }; +} +exports.ObjectIdentifier = { + fromJSON(object) { + return { id: Array.isArray(object?.id) ? 
object.id.map((e) => Number(e)) : [] }; + }, + toJSON(message) { + const obj = {}; + if (message.id) { + obj.id = message.id.map((e) => Math.round(e)); + } + else { + obj.id = []; + } + return obj; + }, +}; +function createBaseObjectIdentifierValuePair() { + return { oid: undefined, value: Buffer.alloc(0) }; +} +exports.ObjectIdentifierValuePair = { + fromJSON(object) { + return { + oid: isSet(object.oid) ? exports.ObjectIdentifier.fromJSON(object.oid) : undefined, + value: isSet(object.value) ? Buffer.from(bytesFromBase64(object.value)) : Buffer.alloc(0), + }; + }, + toJSON(message) { + const obj = {}; + message.oid !== undefined && (obj.oid = message.oid ? exports.ObjectIdentifier.toJSON(message.oid) : undefined); + message.value !== undefined && + (obj.value = base64FromBytes(message.value !== undefined ? message.value : Buffer.alloc(0))); + return obj; + }, +}; +function createBaseDistinguishedName() { + return { organization: "", commonName: "" }; +} +exports.DistinguishedName = { + fromJSON(object) { + return { + organization: isSet(object.organization) ? String(object.organization) : "", + commonName: isSet(object.commonName) ? String(object.commonName) : "", + }; + }, + toJSON(message) { + const obj = {}; + message.organization !== undefined && (obj.organization = message.organization); + message.commonName !== undefined && (obj.commonName = message.commonName); + return obj; + }, +}; +function createBaseX509Certificate() { + return { rawBytes: Buffer.alloc(0) }; +} +exports.X509Certificate = { + fromJSON(object) { + return { rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : Buffer.alloc(0) }; + }, + toJSON(message) { + const obj = {}; + message.rawBytes !== undefined && + (obj.rawBytes = base64FromBytes(message.rawBytes !== undefined ? message.rawBytes : Buffer.alloc(0))); + return obj; + }, +}; +function createBaseSubjectAlternativeName() { + return { type: 0, identity: undefined }; +} +exports.SubjectAlternativeName = { + fromJSON(object) { + return { + type: isSet(object.type) ? subjectAlternativeNameTypeFromJSON(object.type) : 0, + identity: isSet(object.regexp) + ? { $case: "regexp", regexp: String(object.regexp) } + : isSet(object.value) + ? { $case: "value", value: String(object.value) } + : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.type !== undefined && (obj.type = subjectAlternativeNameTypeToJSON(message.type)); + message.identity?.$case === "regexp" && (obj.regexp = message.identity?.regexp); + message.identity?.$case === "value" && (obj.value = message.identity?.value); + return obj; + }, +}; +function createBaseX509CertificateChain() { + return { certificates: [] }; +} +exports.X509CertificateChain = { + fromJSON(object) { + return { + certificates: Array.isArray(object?.certificates) + ? object.certificates.map((e) => exports.X509Certificate.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.certificates) { + obj.certificates = message.certificates.map((e) => e ? exports.X509Certificate.toJSON(e) : undefined); + } + else { + obj.certificates = []; + } + return obj; + }, +}; +function createBaseTimeRange() { + return { start: undefined, end: undefined }; +} +exports.TimeRange = { + fromJSON(object) { + return { + start: isSet(object.start) ? fromJsonTimestamp(object.start) : undefined, + end: isSet(object.end) ? 
fromJsonTimestamp(object.end) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.start !== undefined && (obj.start = message.start.toISOString()); + message.end !== undefined && (obj.end = message.end.toISOString()); + return obj; + }, +}; +var tsProtoGlobalThis = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); +function bytesFromBase64(b64) { + if (tsProtoGlobalThis.Buffer) { + return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64")); + } + else { + const bin = tsProtoGlobalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} +function base64FromBytes(arr) { + if (tsProtoGlobalThis.Buffer) { + return tsProtoGlobalThis.Buffer.from(arr).toString("base64"); + } + else { + const bin = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return tsProtoGlobalThis.btoa(bin.join("")); + } +} +function fromTimestamp(t) { + let millis = Number(t.seconds) * 1000; + millis += t.nanos / 1000000; + return new Date(millis); +} +function fromJsonTimestamp(o) { + if (o instanceof Date) { + return o; + } + else if (typeof o === "string") { + return new Date(o); + } + else { + return fromTimestamp(timestamp_1.Timestamp.fromJSON(o)); + } +} +function isSet(value) { + return value !== null && value !== undefined; +} diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js new file mode 100644 index 0000000000000..398193b2075a7 --- /dev/null +++ b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js @@ -0,0 +1,167 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TransparencyLogEntry = exports.InclusionPromise = exports.InclusionProof = exports.Checkpoint = exports.KindVersion = void 0; +/* eslint-disable */ +const sigstore_common_1 = require("./sigstore_common"); +function createBaseKindVersion() { + return { kind: "", version: "" }; +} +exports.KindVersion = { + fromJSON(object) { + return { + kind: isSet(object.kind) ? String(object.kind) : "", + version: isSet(object.version) ? String(object.version) : "", + }; + }, + toJSON(message) { + const obj = {}; + message.kind !== undefined && (obj.kind = message.kind); + message.version !== undefined && (obj.version = message.version); + return obj; + }, +}; +function createBaseCheckpoint() { + return { envelope: "" }; +} +exports.Checkpoint = { + fromJSON(object) { + return { envelope: isSet(object.envelope) ? String(object.envelope) : "" }; + }, + toJSON(message) { + const obj = {}; + message.envelope !== undefined && (obj.envelope = message.envelope); + return obj; + }, +}; +function createBaseInclusionProof() { + return { logIndex: "0", rootHash: Buffer.alloc(0), treeSize: "0", hashes: [], checkpoint: undefined }; +} +exports.InclusionProof = { + fromJSON(object) { + return { + logIndex: isSet(object.logIndex) ? String(object.logIndex) : "0", + rootHash: isSet(object.rootHash) ? 
Buffer.from(bytesFromBase64(object.rootHash)) : Buffer.alloc(0), + treeSize: isSet(object.treeSize) ? String(object.treeSize) : "0", + hashes: Array.isArray(object?.hashes) ? object.hashes.map((e) => Buffer.from(bytesFromBase64(e))) : [], + checkpoint: isSet(object.checkpoint) ? exports.Checkpoint.fromJSON(object.checkpoint) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.logIndex !== undefined && (obj.logIndex = message.logIndex); + message.rootHash !== undefined && + (obj.rootHash = base64FromBytes(message.rootHash !== undefined ? message.rootHash : Buffer.alloc(0))); + message.treeSize !== undefined && (obj.treeSize = message.treeSize); + if (message.hashes) { + obj.hashes = message.hashes.map((e) => base64FromBytes(e !== undefined ? e : Buffer.alloc(0))); + } + else { + obj.hashes = []; + } + message.checkpoint !== undefined && + (obj.checkpoint = message.checkpoint ? exports.Checkpoint.toJSON(message.checkpoint) : undefined); + return obj; + }, +}; +function createBaseInclusionPromise() { + return { signedEntryTimestamp: Buffer.alloc(0) }; +} +exports.InclusionPromise = { + fromJSON(object) { + return { + signedEntryTimestamp: isSet(object.signedEntryTimestamp) + ? Buffer.from(bytesFromBase64(object.signedEntryTimestamp)) + : Buffer.alloc(0), + }; + }, + toJSON(message) { + const obj = {}; + message.signedEntryTimestamp !== undefined && + (obj.signedEntryTimestamp = base64FromBytes(message.signedEntryTimestamp !== undefined ? message.signedEntryTimestamp : Buffer.alloc(0))); + return obj; + }, +}; +function createBaseTransparencyLogEntry() { + return { + logIndex: "0", + logId: undefined, + kindVersion: undefined, + integratedTime: "0", + inclusionPromise: undefined, + inclusionProof: undefined, + canonicalizedBody: Buffer.alloc(0), + }; +} +exports.TransparencyLogEntry = { + fromJSON(object) { + return { + logIndex: isSet(object.logIndex) ? String(object.logIndex) : "0", + logId: isSet(object.logId) ? sigstore_common_1.LogId.fromJSON(object.logId) : undefined, + kindVersion: isSet(object.kindVersion) ? exports.KindVersion.fromJSON(object.kindVersion) : undefined, + integratedTime: isSet(object.integratedTime) ? String(object.integratedTime) : "0", + inclusionPromise: isSet(object.inclusionPromise) ? exports.InclusionPromise.fromJSON(object.inclusionPromise) : undefined, + inclusionProof: isSet(object.inclusionProof) ? exports.InclusionProof.fromJSON(object.inclusionProof) : undefined, + canonicalizedBody: isSet(object.canonicalizedBody) + ? Buffer.from(bytesFromBase64(object.canonicalizedBody)) + : Buffer.alloc(0), + }; + }, + toJSON(message) { + const obj = {}; + message.logIndex !== undefined && (obj.logIndex = message.logIndex); + message.logId !== undefined && (obj.logId = message.logId ? sigstore_common_1.LogId.toJSON(message.logId) : undefined); + message.kindVersion !== undefined && + (obj.kindVersion = message.kindVersion ? exports.KindVersion.toJSON(message.kindVersion) : undefined); + message.integratedTime !== undefined && (obj.integratedTime = message.integratedTime); + message.inclusionPromise !== undefined && + (obj.inclusionPromise = message.inclusionPromise ? exports.InclusionPromise.toJSON(message.inclusionPromise) : undefined); + message.inclusionProof !== undefined && + (obj.inclusionProof = message.inclusionProof ? exports.InclusionProof.toJSON(message.inclusionProof) : undefined); + message.canonicalizedBody !== undefined && + (obj.canonicalizedBody = base64FromBytes(message.canonicalizedBody !== undefined ? 
message.canonicalizedBody : Buffer.alloc(0))); + return obj; + }, +}; +var tsProtoGlobalThis = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); +function bytesFromBase64(b64) { + if (tsProtoGlobalThis.Buffer) { + return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64")); + } + else { + const bin = tsProtoGlobalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} +function base64FromBytes(arr) { + if (tsProtoGlobalThis.Buffer) { + return tsProtoGlobalThis.Buffer.from(arr).toString("base64"); + } + else { + const bin = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return tsProtoGlobalThis.btoa(bin.join("")); + } +} +function isSet(value) { + return value !== null && value !== undefined; +} diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js new file mode 100644 index 0000000000000..05e566767cdb2 --- /dev/null +++ b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js @@ -0,0 +1,103 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TrustedRoot = exports.CertificateAuthority = exports.TransparencyLogInstance = void 0; +/* eslint-disable */ +const sigstore_common_1 = require("./sigstore_common"); +function createBaseTransparencyLogInstance() { + return { baseUrl: "", hashAlgorithm: 0, publicKey: undefined, logId: undefined }; +} +exports.TransparencyLogInstance = { + fromJSON(object) { + return { + baseUrl: isSet(object.baseUrl) ? String(object.baseUrl) : "", + hashAlgorithm: isSet(object.hashAlgorithm) ? (0, sigstore_common_1.hashAlgorithmFromJSON)(object.hashAlgorithm) : 0, + publicKey: isSet(object.publicKey) ? sigstore_common_1.PublicKey.fromJSON(object.publicKey) : undefined, + logId: isSet(object.logId) ? sigstore_common_1.LogId.fromJSON(object.logId) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.baseUrl !== undefined && (obj.baseUrl = message.baseUrl); + message.hashAlgorithm !== undefined && (obj.hashAlgorithm = (0, sigstore_common_1.hashAlgorithmToJSON)(message.hashAlgorithm)); + message.publicKey !== undefined && + (obj.publicKey = message.publicKey ? sigstore_common_1.PublicKey.toJSON(message.publicKey) : undefined); + message.logId !== undefined && (obj.logId = message.logId ? sigstore_common_1.LogId.toJSON(message.logId) : undefined); + return obj; + }, +}; +function createBaseCertificateAuthority() { + return { subject: undefined, uri: "", certChain: undefined, validFor: undefined }; +} +exports.CertificateAuthority = { + fromJSON(object) { + return { + subject: isSet(object.subject) ? sigstore_common_1.DistinguishedName.fromJSON(object.subject) : undefined, + uri: isSet(object.uri) ? String(object.uri) : "", + certChain: isSet(object.certChain) ? sigstore_common_1.X509CertificateChain.fromJSON(object.certChain) : undefined, + validFor: isSet(object.validFor) ? 
sigstore_common_1.TimeRange.fromJSON(object.validFor) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.subject !== undefined && + (obj.subject = message.subject ? sigstore_common_1.DistinguishedName.toJSON(message.subject) : undefined); + message.uri !== undefined && (obj.uri = message.uri); + message.certChain !== undefined && + (obj.certChain = message.certChain ? sigstore_common_1.X509CertificateChain.toJSON(message.certChain) : undefined); + message.validFor !== undefined && + (obj.validFor = message.validFor ? sigstore_common_1.TimeRange.toJSON(message.validFor) : undefined); + return obj; + }, +}; +function createBaseTrustedRoot() { + return { mediaType: "", tlogs: [], certificateAuthorities: [], ctlogs: [], timestampAuthorities: [] }; +} +exports.TrustedRoot = { + fromJSON(object) { + return { + mediaType: isSet(object.mediaType) ? String(object.mediaType) : "", + tlogs: Array.isArray(object?.tlogs) ? object.tlogs.map((e) => exports.TransparencyLogInstance.fromJSON(e)) : [], + certificateAuthorities: Array.isArray(object?.certificateAuthorities) + ? object.certificateAuthorities.map((e) => exports.CertificateAuthority.fromJSON(e)) + : [], + ctlogs: Array.isArray(object?.ctlogs) + ? object.ctlogs.map((e) => exports.TransparencyLogInstance.fromJSON(e)) + : [], + timestampAuthorities: Array.isArray(object?.timestampAuthorities) + ? object.timestampAuthorities.map((e) => exports.CertificateAuthority.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + message.mediaType !== undefined && (obj.mediaType = message.mediaType); + if (message.tlogs) { + obj.tlogs = message.tlogs.map((e) => e ? exports.TransparencyLogInstance.toJSON(e) : undefined); + } + else { + obj.tlogs = []; + } + if (message.certificateAuthorities) { + obj.certificateAuthorities = message.certificateAuthorities.map((e) => e ? exports.CertificateAuthority.toJSON(e) : undefined); + } + else { + obj.certificateAuthorities = []; + } + if (message.ctlogs) { + obj.ctlogs = message.ctlogs.map((e) => e ? exports.TransparencyLogInstance.toJSON(e) : undefined); + } + else { + obj.ctlogs = []; + } + if (message.timestampAuthorities) { + obj.timestampAuthorities = message.timestampAuthorities.map((e) => e ? 
exports.CertificateAuthority.toJSON(e) : undefined); + } + else { + obj.timestampAuthorities = []; + } + return obj; + }, +}; +function isSet(value) { + return value !== null && value !== undefined; +} diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js new file mode 100644 index 0000000000000..8a72b89761869 --- /dev/null +++ b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js @@ -0,0 +1,273 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Input = exports.Artifact = exports.ArtifactVerificationOptions_TimestampAuthorityOptions = exports.ArtifactVerificationOptions_CtlogOptions = exports.ArtifactVerificationOptions_TlogOptions = exports.ArtifactVerificationOptions = exports.PublicKeyIdentities = exports.CertificateIdentities = exports.CertificateIdentity = void 0; +/* eslint-disable */ +const sigstore_bundle_1 = require("./sigstore_bundle"); +const sigstore_common_1 = require("./sigstore_common"); +const sigstore_trustroot_1 = require("./sigstore_trustroot"); +function createBaseCertificateIdentity() { + return { issuer: "", san: undefined, oids: [] }; +} +exports.CertificateIdentity = { + fromJSON(object) { + return { + issuer: isSet(object.issuer) ? String(object.issuer) : "", + san: isSet(object.san) ? sigstore_common_1.SubjectAlternativeName.fromJSON(object.san) : undefined, + oids: Array.isArray(object?.oids) ? object.oids.map((e) => sigstore_common_1.ObjectIdentifierValuePair.fromJSON(e)) : [], + }; + }, + toJSON(message) { + const obj = {}; + message.issuer !== undefined && (obj.issuer = message.issuer); + message.san !== undefined && (obj.san = message.san ? sigstore_common_1.SubjectAlternativeName.toJSON(message.san) : undefined); + if (message.oids) { + obj.oids = message.oids.map((e) => e ? sigstore_common_1.ObjectIdentifierValuePair.toJSON(e) : undefined); + } + else { + obj.oids = []; + } + return obj; + }, +}; +function createBaseCertificateIdentities() { + return { identities: [] }; +} +exports.CertificateIdentities = { + fromJSON(object) { + return { + identities: Array.isArray(object?.identities) + ? object.identities.map((e) => exports.CertificateIdentity.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.identities) { + obj.identities = message.identities.map((e) => e ? exports.CertificateIdentity.toJSON(e) : undefined); + } + else { + obj.identities = []; + } + return obj; + }, +}; +function createBasePublicKeyIdentities() { + return { publicKeys: [] }; +} +exports.PublicKeyIdentities = { + fromJSON(object) { + return { + publicKeys: Array.isArray(object?.publicKeys) ? object.publicKeys.map((e) => sigstore_common_1.PublicKey.fromJSON(e)) : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.publicKeys) { + obj.publicKeys = message.publicKeys.map((e) => e ? sigstore_common_1.PublicKey.toJSON(e) : undefined); + } + else { + obj.publicKeys = []; + } + return obj; + }, +}; +function createBaseArtifactVerificationOptions() { + return { signers: undefined, tlogOptions: undefined, ctlogOptions: undefined, tsaOptions: undefined }; +} +exports.ArtifactVerificationOptions = { + fromJSON(object) { + return { + signers: isSet(object.certificateIdentities) + ? 
{ + $case: "certificateIdentities", + certificateIdentities: exports.CertificateIdentities.fromJSON(object.certificateIdentities), + } + : isSet(object.publicKeys) + ? { $case: "publicKeys", publicKeys: exports.PublicKeyIdentities.fromJSON(object.publicKeys) } + : undefined, + tlogOptions: isSet(object.tlogOptions) + ? exports.ArtifactVerificationOptions_TlogOptions.fromJSON(object.tlogOptions) + : undefined, + ctlogOptions: isSet(object.ctlogOptions) + ? exports.ArtifactVerificationOptions_CtlogOptions.fromJSON(object.ctlogOptions) + : undefined, + tsaOptions: isSet(object.tsaOptions) + ? exports.ArtifactVerificationOptions_TimestampAuthorityOptions.fromJSON(object.tsaOptions) + : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.signers?.$case === "certificateIdentities" && + (obj.certificateIdentities = message.signers?.certificateIdentities + ? exports.CertificateIdentities.toJSON(message.signers?.certificateIdentities) + : undefined); + message.signers?.$case === "publicKeys" && (obj.publicKeys = message.signers?.publicKeys + ? exports.PublicKeyIdentities.toJSON(message.signers?.publicKeys) + : undefined); + message.tlogOptions !== undefined && (obj.tlogOptions = message.tlogOptions + ? exports.ArtifactVerificationOptions_TlogOptions.toJSON(message.tlogOptions) + : undefined); + message.ctlogOptions !== undefined && (obj.ctlogOptions = message.ctlogOptions + ? exports.ArtifactVerificationOptions_CtlogOptions.toJSON(message.ctlogOptions) + : undefined); + message.tsaOptions !== undefined && (obj.tsaOptions = message.tsaOptions + ? exports.ArtifactVerificationOptions_TimestampAuthorityOptions.toJSON(message.tsaOptions) + : undefined); + return obj; + }, +}; +function createBaseArtifactVerificationOptions_TlogOptions() { + return { threshold: 0, performOnlineVerification: false, disable: false }; +} +exports.ArtifactVerificationOptions_TlogOptions = { + fromJSON(object) { + return { + threshold: isSet(object.threshold) ? Number(object.threshold) : 0, + performOnlineVerification: isSet(object.performOnlineVerification) + ? Boolean(object.performOnlineVerification) + : false, + disable: isSet(object.disable) ? Boolean(object.disable) : false, + }; + }, + toJSON(message) { + const obj = {}; + message.threshold !== undefined && (obj.threshold = Math.round(message.threshold)); + message.performOnlineVerification !== undefined && + (obj.performOnlineVerification = message.performOnlineVerification); + message.disable !== undefined && (obj.disable = message.disable); + return obj; + }, +}; +function createBaseArtifactVerificationOptions_CtlogOptions() { + return { threshold: 0, detachedSct: false, disable: false }; +} +exports.ArtifactVerificationOptions_CtlogOptions = { + fromJSON(object) { + return { + threshold: isSet(object.threshold) ? Number(object.threshold) : 0, + detachedSct: isSet(object.detachedSct) ? Boolean(object.detachedSct) : false, + disable: isSet(object.disable) ? Boolean(object.disable) : false, + }; + }, + toJSON(message) { + const obj = {}; + message.threshold !== undefined && (obj.threshold = Math.round(message.threshold)); + message.detachedSct !== undefined && (obj.detachedSct = message.detachedSct); + message.disable !== undefined && (obj.disable = message.disable); + return obj; + }, +}; +function createBaseArtifactVerificationOptions_TimestampAuthorityOptions() { + return { threshold: 0, disable: false }; +} +exports.ArtifactVerificationOptions_TimestampAuthorityOptions = { + fromJSON(object) { + return { + threshold: isSet(object.threshold) ? 
Number(object.threshold) : 0, + disable: isSet(object.disable) ? Boolean(object.disable) : false, + }; + }, + toJSON(message) { + const obj = {}; + message.threshold !== undefined && (obj.threshold = Math.round(message.threshold)); + message.disable !== undefined && (obj.disable = message.disable); + return obj; + }, +}; +function createBaseArtifact() { + return { data: undefined }; +} +exports.Artifact = { + fromJSON(object) { + return { + data: isSet(object.artifactUri) + ? { $case: "artifactUri", artifactUri: String(object.artifactUri) } + : isSet(object.artifact) + ? { $case: "artifact", artifact: Buffer.from(bytesFromBase64(object.artifact)) } + : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.data?.$case === "artifactUri" && (obj.artifactUri = message.data?.artifactUri); + message.data?.$case === "artifact" && + (obj.artifact = message.data?.artifact !== undefined ? base64FromBytes(message.data?.artifact) : undefined); + return obj; + }, +}; +function createBaseInput() { + return { + artifactTrustRoot: undefined, + artifactVerificationOptions: undefined, + bundle: undefined, + artifact: undefined, + }; +} +exports.Input = { + fromJSON(object) { + return { + artifactTrustRoot: isSet(object.artifactTrustRoot) ? sigstore_trustroot_1.TrustedRoot.fromJSON(object.artifactTrustRoot) : undefined, + artifactVerificationOptions: isSet(object.artifactVerificationOptions) + ? exports.ArtifactVerificationOptions.fromJSON(object.artifactVerificationOptions) + : undefined, + bundle: isSet(object.bundle) ? sigstore_bundle_1.Bundle.fromJSON(object.bundle) : undefined, + artifact: isSet(object.artifact) ? exports.Artifact.fromJSON(object.artifact) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.artifactTrustRoot !== undefined && + (obj.artifactTrustRoot = message.artifactTrustRoot ? sigstore_trustroot_1.TrustedRoot.toJSON(message.artifactTrustRoot) : undefined); + message.artifactVerificationOptions !== undefined && + (obj.artifactVerificationOptions = message.artifactVerificationOptions + ? exports.ArtifactVerificationOptions.toJSON(message.artifactVerificationOptions) + : undefined); + message.bundle !== undefined && (obj.bundle = message.bundle ? sigstore_bundle_1.Bundle.toJSON(message.bundle) : undefined); + message.artifact !== undefined && (obj.artifact = message.artifact ? 
exports.Artifact.toJSON(message.artifact) : undefined); + return obj; + }, +}; +var tsProtoGlobalThis = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); +function bytesFromBase64(b64) { + if (tsProtoGlobalThis.Buffer) { + return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64")); + } + else { + const bin = tsProtoGlobalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} +function base64FromBytes(arr) { + if (tsProtoGlobalThis.Buffer) { + return tsProtoGlobalThis.Buffer.from(arr).toString("base64"); + } + else { + const bin = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return tsProtoGlobalThis.btoa(bin.join("")); + } +} +function isSet(value) { + return value !== null && value !== undefined; +} diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/index.js b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/index.js new file mode 100644 index 0000000000000..eafb768c48fca --- /dev/null +++ b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/index.js @@ -0,0 +1,37 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +__exportStar(require("./__generated__/envelope"), exports); +__exportStar(require("./__generated__/sigstore_bundle"), exports); +__exportStar(require("./__generated__/sigstore_common"), exports); +__exportStar(require("./__generated__/sigstore_rekor"), exports); +__exportStar(require("./__generated__/sigstore_trustroot"), exports); +__exportStar(require("./__generated__/sigstore_verification"), exports); diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/package.json b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/package.json new file mode 100644 index 0000000000000..450abb157f31a --- /dev/null +++ b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/package.json @@ -0,0 +1,31 @@ +{ + "name": "@sigstore/protobuf-specs", + "version": "0.2.1", + "description": "code-signing for npm packages", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "scripts": { + "build": "tsc" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/sigstore/protobuf-specs.git" + }, + "files": [ + "dist" + ], + "author": "bdehamer@github.com", + "license": "Apache-2.0", + "bugs": { + "url": "https://github.com/sigstore/protobuf-specs/issues" + }, + "homepage": "https://github.com/sigstore/protobuf-specs#readme", + "devDependencies": { + "@tsconfig/node14": "^1.0.3", + "@types/node": "^18.14.0", + "typescript": "^4.9.5" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } +} diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/package.json b/node_modules/sigstore/node_modules/@sigstore/tuf/package.json new file mode 100644 index 0000000000000..3473dfef2cde9 --- /dev/null +++ b/node_modules/sigstore/node_modules/@sigstore/tuf/package.json @@ -0,0 +1,41 @@ +{ + "name": "@sigstore/tuf", + "version": "1.0.3", + "description": "Client for the Sigstore TUF repository", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "scripts": { + "clean": "shx rm -rf dist *.tsbuildinfo", + "build": "tsc --build", + "test": "jest" + }, + "files": [ + "dist", + "store" + ], + "author": "bdehamer@github.com", + "license": "Apache-2.0", + "repository": { + "type": "git", + "url": "git+https://github.com/sigstore/sigstore-js.git" + }, + "bugs": { + "url": "https://github.com/sigstore/sigstore-js/issues" + }, + "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/tuf#readme", + "publishConfig": { + "provenance": true + }, + "devDependencies": { + "@sigstore/jest": "^0.0.0", + "@tufjs/repo-mock": "^1.1.0", + "@types/make-fetch-happen": "^10.0.0" + }, + "dependencies": { + "@sigstore/protobuf-specs": "^0.2.0", + "tuf-js": "^1.1.7" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } +} diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/store/public-good-instance-root.json b/node_modules/sigstore/node_modules/@sigstore/tuf/store/public-good-instance-root.json new file mode 100644 index 0000000000000..e95c7e88cdf09 --- /dev/null +++ b/node_modules/sigstore/node_modules/@sigstore/tuf/store/public-good-instance-root.json @@ -0,0 +1 @@ +{"signed":{"_type":"root","spec_version":"1.0","version":7,"expires":"2023-10-04T13:08:11Z","keys":{"25a0eb450fd3ee2bd79218c963dce3f1cc6118badf251bf149f0bd07d5cabe99":{"keytype":"ecdsa-sha2-nistp256","scheme":"ecdsa-sha2-nistp256","keyid_hash_algorithms":["sha256","sha512"],"keyval":{"public":"-----BEGIN PUBLIC 
KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEEXsz3SZXFb8jMV42j6pJlyjbjR8K\nN3Bwocexq6LMIb5qsWKOQvLN16NUefLc4HswOoumRsVVaajSpQS6fobkRw==\n-----END PUBLIC KEY-----\n"}},"2e61cd0cbf4a8f45809bda9f7f78c0d33ad11842ff94ae340873e2664dc843de":{"keytype":"ecdsa-sha2-nistp256","scheme":"ecdsa-sha2-nistp256","keyid_hash_algorithms":["sha256","sha512"],"keyval":{"public":"-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE0ghrh92Lw1Yr3idGV5WqCtMDB8Cx\n+D8hdC4w2ZLNIplVRoVGLskYa3gheMyOjiJ8kPi15aQ2//7P+oj7UvJPGw==\n-----END PUBLIC KEY-----\n"}},"45b283825eb184cabd582eb17b74fc8ed404f68cf452acabdad2ed6f90ce216b":{"keytype":"ecdsa-sha2-nistp256","scheme":"ecdsa-sha2-nistp256","keyid_hash_algorithms":["sha256","sha512"],"keyval":{"public":"-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAELrWvNt94v4R085ELeeCMxHp7PldF\n0/T1GxukUh2ODuggLGJE0pc1e8CSBf6CS91Fwo9FUOuRsjBUld+VqSyCdQ==\n-----END PUBLIC KEY-----\n"}},"7f7513b25429a64473e10ce3ad2f3da372bbdd14b65d07bbaf547e7c8bbbe62b":{"keytype":"ecdsa-sha2-nistp256","scheme":"ecdsa-sha2-nistp256","keyid_hash_algorithms":["sha256","sha512"],"keyval":{"public":"-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEinikSsAQmYkNeH5eYq/CnIzLaacO\nxlSaawQDOwqKy/tCqxq5xxPSJc21K4WIhs9GyOkKfzueY3GILzcMJZ4cWw==\n-----END PUBLIC KEY-----\n"}},"e1863ba02070322ebc626dcecf9d881a3a38c35c3b41a83765b6ad6c37eaec2a":{"keytype":"ecdsa-sha2-nistp256","scheme":"ecdsa-sha2-nistp256","keyid_hash_algorithms":["sha256","sha512"],"keyval":{"public":"-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEWRiGr5+j+3J5SsH+Ztr5nE2H2wO7\nBV+nO3s93gLca18qTOzHY1oWyAGDykMSsGTUBSt9D+An0KfKsD2mfSM42Q==\n-----END PUBLIC KEY-----\n"}},"f5312f542c21273d9485a49394386c4575804770667f2ddb59b3bf0669fddd2f":{"keytype":"ecdsa-sha2-nistp256","scheme":"ecdsa-sha2-nistp256","keyid_hash_algorithms":["sha256","sha512"],"keyval":{"public":"-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEzBzVOmHCPojMVLSI364WiiV8NPrD\n6IgRxVliskz/v+y3JER5mcVGcONliDcWMC5J2lfHmjPNPhb4H7xm8LzfSA==\n-----END PUBLIC KEY-----\n"}},"ff51e17fcf253119b7033f6f57512631da4a0969442afcf9fc8b141c7f2be99c":{"keytype":"ecdsa-sha2-nistp256","scheme":"ecdsa-sha2-nistp256","keyid_hash_algorithms":["sha256","sha512"],"keyval":{"public":"-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEy8XKsmhBYDI8Jc0GwzBxeKax0cm5\nSTKEU65HPFunUn41sT8pi0FjM4IkHz/YUmwmLUO0Wt7lxhj6BkLIK4qYAw==\n-----END PUBLIC 
KEY-----\n"}}},"roles":{"root":{"keyids":["ff51e17fcf253119b7033f6f57512631da4a0969442afcf9fc8b141c7f2be99c","25a0eb450fd3ee2bd79218c963dce3f1cc6118badf251bf149f0bd07d5cabe99","f5312f542c21273d9485a49394386c4575804770667f2ddb59b3bf0669fddd2f","7f7513b25429a64473e10ce3ad2f3da372bbdd14b65d07bbaf547e7c8bbbe62b","2e61cd0cbf4a8f45809bda9f7f78c0d33ad11842ff94ae340873e2664dc843de"],"threshold":3},"snapshot":{"keyids":["45b283825eb184cabd582eb17b74fc8ed404f68cf452acabdad2ed6f90ce216b"],"threshold":1},"targets":{"keyids":["ff51e17fcf253119b7033f6f57512631da4a0969442afcf9fc8b141c7f2be99c","25a0eb450fd3ee2bd79218c963dce3f1cc6118badf251bf149f0bd07d5cabe99","f5312f542c21273d9485a49394386c4575804770667f2ddb59b3bf0669fddd2f","7f7513b25429a64473e10ce3ad2f3da372bbdd14b65d07bbaf547e7c8bbbe62b","2e61cd0cbf4a8f45809bda9f7f78c0d33ad11842ff94ae340873e2664dc843de"],"threshold":3},"timestamp":{"keyids":["e1863ba02070322ebc626dcecf9d881a3a38c35c3b41a83765b6ad6c37eaec2a"],"threshold":1}},"consistent_snapshot":true},"signatures":[{"keyid":"25a0eb450fd3ee2bd79218c963dce3f1cc6118badf251bf149f0bd07d5cabe99","sig":"3046022100c0610c0055ce5c4a52d054d7322e7b514d55baf44423d63aa4daa077cc60fd1f022100a097f2803f090fb66c42ead915a2c46ebe7db53a32bf18f2188275cc936f8bdd"},{"keyid":"f5312f542c21273d9485a49394386c4575804770667f2ddb59b3bf0669fddd2f","sig":"304502203134f0468810299d5493a867c40630b341296b92e59c29821311d353343bb3a4022100e667ae3d304e7e3da0894c7425f6b9ecd917106841280e5cf6f3496ad5f8f68e"},{"keyid":"7f7513b25429a64473e10ce3ad2f3da372bbdd14b65d07bbaf547e7c8bbbe62b","sig":"3045022037fe5f45426f21eaaf4730d2136f2b1611d6379688f79b9d1e3f61719997135c022100b63b022d7b79d4694b96f416d88aa4d7b1a3bff8a01f4fb51e0f42137c7d2d06"},{"keyid":"2e61cd0cbf4a8f45809bda9f7f78c0d33ad11842ff94ae340873e2664dc843de","sig":"3044022007cc8fcc4940809f2751ad5b535f4c5f53f5b4952f5b5696b09668e743306ac1022006dfcdf94e94c92163eeb1b47796db62cedaa730aa13aa61b573fe23714730f2"}]} diff --git a/node_modules/sigstore/node_modules/@tufjs/canonical-json/LICENSE b/node_modules/sigstore/node_modules/@tufjs/canonical-json/LICENSE new file mode 100644 index 0000000000000..420700f5d3765 --- /dev/null +++ b/node_modules/sigstore/node_modules/@tufjs/canonical-json/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2022 GitHub and the TUF Contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
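The `@tufjs/canonical-json` package vendored below implements the OLPC canonical JSON encoding that TUF uses so that signatures are always computed over a stable byte sequence. A minimal usage sketch of its exported `canonicalize` function (the object literal here is illustrative, not part of the vendored file):

```js
const { canonicalize } = require('@tufjs/canonical-json');

// Keys come out lexically sorted with no insignificant whitespace, so the
// same object always canonicalizes to the same bytes.
canonicalize({ b: 1, a: [true, null, 'x'] });
// => '{"a":[true,null,"x"],"b":1}'

// Floats are not part of the dialect; only integers are encodable.
canonicalize({ n: 1.5 }); // throws TypeError
```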
diff --git a/node_modules/sigstore/node_modules/@tufjs/canonical-json/lib/index.js b/node_modules/sigstore/node_modules/@tufjs/canonical-json/lib/index.js new file mode 100644 index 0000000000000..d480696de1f6c --- /dev/null +++ b/node_modules/sigstore/node_modules/@tufjs/canonical-json/lib/index.js @@ -0,0 +1,64 @@ +const COMMA = ','; +const COLON = ':'; +const LEFT_SQUARE_BRACKET = '['; +const RIGHT_SQUARE_BRACKET = ']'; +const LEFT_CURLY_BRACKET = '{'; +const RIGHT_CURLY_BRACKET = '}'; + +// Recursively encodes the supplied object according to the canonical JSON form +// as specified at http://wiki.laptop.org/go/Canonical_JSON. It's a restricted +// dialect of JSON in which keys are lexically sorted, floats are not allowed, +// and only double quotes and backslashes are escaped. +function canonicalize(object) { + const buffer = []; + if (typeof object === 'string') { + buffer.push(canonicalizeString(object)); + } else if (typeof object === 'boolean') { + buffer.push(JSON.stringify(object)); + } else if (Number.isInteger(object)) { + buffer.push(JSON.stringify(object)); + } else if (object === null) { + buffer.push(JSON.stringify(object)); + } else if (Array.isArray(object)) { + buffer.push(LEFT_SQUARE_BRACKET); + let first = true; + object.forEach((element) => { + if (!first) { + buffer.push(COMMA); + } + first = false; + buffer.push(canonicalize(element)); + }); + buffer.push(RIGHT_SQUARE_BRACKET); + } else if (typeof object === 'object') { + buffer.push(LEFT_CURLY_BRACKET); + let first = true; + Object.keys(object) + .sort() + .forEach((property) => { + if (!first) { + buffer.push(COMMA); + } + first = false; + buffer.push(canonicalizeString(property)); + buffer.push(COLON); + buffer.push(canonicalize(object[property])); + }); + buffer.push(RIGHT_CURLY_BRACKET); + } else { + throw new TypeError('cannot encode ' + object.toString()); + } + + return buffer.join(''); +} + +// String canonicalization consists of escaping backslash (\) and double +// quote (") characters and wrapping the resulting string in double quotes. 
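+// For example, canonicalizeString('say "hi"') returns '"say \"hi\""', and a
+// backslash in the input is doubled in the output.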
+function canonicalizeString(string) { + const escapedString = string.replace(/\\/g, '\\\\').replace(/"/g, '\\"'); + return '"' + escapedString + '"'; +} + +module.exports = { + canonicalize, +}; diff --git a/node_modules/sigstore/node_modules/@tufjs/canonical-json/package.json b/node_modules/sigstore/node_modules/@tufjs/canonical-json/package.json new file mode 100644 index 0000000000000..688c9b93c3a4e --- /dev/null +++ b/node_modules/sigstore/node_modules/@tufjs/canonical-json/package.json @@ -0,0 +1,39 @@ +{ + "name": "@tufjs/canonical-json", + "version": "1.0.0", + "description": "OLPC JSON canonicalization", + "main": "lib/index.js", + "typings": "lib/index.d.ts", + "license": "MIT", + "keywords": [ + "json", + "canonical", + "canonicalize", + "canonicalization", + "crypto", + "signature", + "olpc" + ], + "author": "bdehamer@github.com", + "repository": { + "type": "git", + "url": "git+https://github.com/theupdateframework/tuf-js.git" + }, + "homepage": "https://github.com/theupdateframework/tuf-js/packages/canonical-json#readme", + "bugs": { + "url": "https://github.com/theupdateframework/tuf-js/issues" + }, + "files": [ + "lib/" + ], + "scripts": { + "test": "jest" + }, + "devDependencies": { + "@types/node": "^18.14.1", + "typescript": "^4.9.5" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } +} diff --git a/node_modules/sigstore/node_modules/@tufjs/models/LICENSE b/node_modules/sigstore/node_modules/@tufjs/models/LICENSE new file mode 100644 index 0000000000000..420700f5d3765 --- /dev/null +++ b/node_modules/sigstore/node_modules/@tufjs/models/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2022 GitHub and the TUF Contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/base.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/base.js new file mode 100644 index 0000000000000..d89a089c33092 --- /dev/null +++ b/node_modules/sigstore/node_modules/@tufjs/models/dist/base.js @@ -0,0 +1,83 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Signed = exports.isMetadataKind = exports.MetadataKind = void 0; +const util_1 = __importDefault(require("util")); +const error_1 = require("./error"); +const utils_1 = require("./utils"); +const SPECIFICATION_VERSION = ['1', '0', '31']; +var MetadataKind; +(function (MetadataKind) { + MetadataKind["Root"] = "root"; + MetadataKind["Timestamp"] = "timestamp"; + MetadataKind["Snapshot"] = "snapshot"; + MetadataKind["Targets"] = "targets"; +})(MetadataKind = exports.MetadataKind || (exports.MetadataKind = {})); +function isMetadataKind(value) { + return (typeof value === 'string' && + Object.values(MetadataKind).includes(value)); +} +exports.isMetadataKind = isMetadataKind; +/*** + * A base class for the signed part of TUF metadata. + * + * Objects with base class Signed are usually included in a ``Metadata`` object + * on the signed attribute. This class provides attributes and methods that + * are common for all TUF metadata types (roles). + */ +class Signed { + constructor(options) { + this.specVersion = options.specVersion || SPECIFICATION_VERSION.join('.'); + const specList = this.specVersion.split('.'); + if (!(specList.length === 2 || specList.length === 3) || + !specList.every((item) => isNumeric(item))) { + throw new error_1.ValueError('Failed to parse specVersion'); + } + // major version must match + if (specList[0] != SPECIFICATION_VERSION[0]) { + throw new error_1.ValueError('Unsupported specVersion'); + } + this.expires = options.expires || new Date().toISOString(); + this.version = options.version || 1; + this.unrecognizedFields = options.unrecognizedFields || {}; + } + equals(other) { + if (!(other instanceof Signed)) { + return false; + } + return (this.specVersion === other.specVersion && + this.expires === other.expires && + this.version === other.version && + util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields)); + } + isExpired(referenceTime) { + if (!referenceTime) { + referenceTime = new Date(); + } + return referenceTime >= new Date(this.expires); + } + static commonFieldsFromJSON(data) { + const { spec_version, expires, version, ...rest } = data; + if (utils_1.guard.isDefined(spec_version) && !(typeof spec_version === 'string')) { + throw new TypeError('spec_version must be a string'); + } + if (utils_1.guard.isDefined(expires) && !(typeof expires === 'string')) { + throw new TypeError('expires must be a string'); + } + if (utils_1.guard.isDefined(version) && !(typeof version === 'number')) { + throw new TypeError('version must be a number'); + } + return { + specVersion: spec_version, + expires, + version, + unrecognizedFields: rest, + }; + } +} +exports.Signed = Signed; +function isNumeric(str) { + return !isNaN(Number(str)); +} diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/delegations.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/delegations.js new file mode 100644 index 0000000000000..7165f1e244393 --- /dev/null +++ b/node_modules/sigstore/node_modules/@tufjs/models/dist/delegations.js @@ -0,0 +1,115 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Delegations = void 0; +const util_1 = __importDefault(require("util")); +const error_1 = require("./error"); +const key_1 = require("./key"); +const role_1 = require("./role"); +const utils_1 = require("./utils"); +/** + * A container object storing information about all delegations. + * + * Targets roles that are trusted to provide signed metadata files + * describing targets with designated pathnames and/or further delegations. + */ +class Delegations { + constructor(options) { + this.keys = options.keys; + this.unrecognizedFields = options.unrecognizedFields || {}; + if (options.roles) { + if (Object.keys(options.roles).some((roleName) => role_1.TOP_LEVEL_ROLE_NAMES.includes(roleName))) { + throw new error_1.ValueError('Delegated role name conflicts with top-level role name'); + } + } + this.succinctRoles = options.succinctRoles; + this.roles = options.roles; + } + equals(other) { + if (!(other instanceof Delegations)) { + return false; + } + return (util_1.default.isDeepStrictEqual(this.keys, other.keys) && + util_1.default.isDeepStrictEqual(this.roles, other.roles) && + util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields) && + util_1.default.isDeepStrictEqual(this.succinctRoles, other.succinctRoles)); + } + *rolesForTarget(targetPath) { + if (this.roles) { + for (const role of Object.values(this.roles)) { + if (role.isDelegatedPath(targetPath)) { + yield { role: role.name, terminating: role.terminating }; + } + } + } + else if (this.succinctRoles) { + yield { + role: this.succinctRoles.getRoleForTarget(targetPath), + terminating: true, + }; + } + } + toJSON() { + const json = { + keys: keysToJSON(this.keys), + ...this.unrecognizedFields, + }; + if (this.roles) { + json.roles = rolesToJSON(this.roles); + } + else if (this.succinctRoles) { + json.succinct_roles = this.succinctRoles.toJSON(); + } + return json; + } + static fromJSON(data) { + const { keys, roles, succinct_roles, ...unrecognizedFields } = data; + let succinctRoles; + if (utils_1.guard.isObject(succinct_roles)) { + succinctRoles = role_1.SuccinctRoles.fromJSON(succinct_roles); + } + return new Delegations({ + keys: keysFromJSON(keys), + roles: rolesFromJSON(roles), + unrecognizedFields, + succinctRoles, + }); + } +} +exports.Delegations = Delegations; +function keysToJSON(keys) { + return Object.entries(keys).reduce((acc, [keyId, key]) => ({ + ...acc, + [keyId]: key.toJSON(), + }), {}); +} +function rolesToJSON(roles) { + return Object.values(roles).map((role) => role.toJSON()); +} +function keysFromJSON(data) { + if (!utils_1.guard.isObjectRecord(data)) { + throw new TypeError('keys is malformed'); + } + return Object.entries(data).reduce((acc, [keyID, keyData]) => ({ + ...acc, + [keyID]: key_1.Key.fromJSON(keyID, keyData), + }), {}); +} +function rolesFromJSON(data) { + let roleMap; + if (utils_1.guard.isDefined(data)) { + if (!utils_1.guard.isObjectArray(data)) { + throw new TypeError('roles is malformed'); + } + roleMap = data.reduce((acc, role) => { + const delegatedRole = role_1.DelegatedRole.fromJSON(role); + return { + ...acc, + [delegatedRole.name]: delegatedRole, + }; + }, {}); + } + return roleMap; +} diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/error.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/error.js new file mode 100644 index 0000000000000..ba80698747ba0 --- /dev/null +++ b/node_modules/sigstore/node_modules/@tufjs/models/dist/error.js @@ 
-0,0 +1,27 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.UnsupportedAlgorithmError = exports.CryptoError = exports.LengthOrHashMismatchError = exports.UnsignedMetadataError = exports.RepositoryError = exports.ValueError = void 0; +// An error about insufficient values +class ValueError extends Error { +} +exports.ValueError = ValueError; +// An error with a repository's state, such as a missing file. +// It covers all exceptions that come from the repository side when +// looking from the perspective of users of metadata API or ngclient. +class RepositoryError extends Error { +} +exports.RepositoryError = RepositoryError; +// An error about metadata object with insufficient threshold of signatures. +class UnsignedMetadataError extends RepositoryError { +} +exports.UnsignedMetadataError = UnsignedMetadataError; +// An error while checking the length and hash values of an object. +class LengthOrHashMismatchError extends RepositoryError { +} +exports.LengthOrHashMismatchError = LengthOrHashMismatchError; +class CryptoError extends Error { +} +exports.CryptoError = CryptoError; +class UnsupportedAlgorithmError extends CryptoError { +} +exports.UnsupportedAlgorithmError = UnsupportedAlgorithmError; diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/file.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/file.js new file mode 100644 index 0000000000000..b35fe5950bbb7 --- /dev/null +++ b/node_modules/sigstore/node_modules/@tufjs/models/dist/file.js @@ -0,0 +1,183 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TargetFile = exports.MetaFile = void 0; +const crypto_1 = __importDefault(require("crypto")); +const util_1 = __importDefault(require("util")); +const error_1 = require("./error"); +const utils_1 = require("./utils"); +// A container with information about a particular metadata file. +// +// This class is used for Timestamp and Snapshot metadata. +class MetaFile { + constructor(opts) { + if (opts.version <= 0) { + throw new error_1.ValueError('Metafile version must be at least 1'); + } + if (opts.length !== undefined) { + validateLength(opts.length); + } + this.version = opts.version; + this.length = opts.length; + this.hashes = opts.hashes; + this.unrecognizedFields = opts.unrecognizedFields || {}; + } + equals(other) { + if (!(other instanceof MetaFile)) { + return false; + } + return (this.version === other.version && + this.length === other.length && + util_1.default.isDeepStrictEqual(this.hashes, other.hashes) && + util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields)); + } + verify(data) { + // Verifies that the given data matches the expected length. + if (this.length !== undefined) { + if (data.length !== this.length) { + throw new error_1.LengthOrHashMismatchError(`Expected length ${this.length} but got ${data.length}`); + } + } + // Verifies that the given data matches the supplied hashes. 
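+        // For example, hashes = { sha256: '<hex digest>' }: each listed
+        // algorithm is recomputed over the data and compared to its digest.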
+ if (this.hashes) { + Object.entries(this.hashes).forEach(([key, value]) => { + let hash; + try { + hash = crypto_1.default.createHash(key); + } + catch (e) { + throw new error_1.LengthOrHashMismatchError(`Hash algorithm ${key} not supported`); + } + const observedHash = hash.update(data).digest('hex'); + if (observedHash !== value) { + throw new error_1.LengthOrHashMismatchError(`Expected hash ${value} but got ${observedHash}`); + } + }); + } + } + toJSON() { + const json = { + version: this.version, + ...this.unrecognizedFields, + }; + if (this.length !== undefined) { + json.length = this.length; + } + if (this.hashes) { + json.hashes = this.hashes; + } + return json; + } + static fromJSON(data) { + const { version, length, hashes, ...rest } = data; + if (typeof version !== 'number') { + throw new TypeError('version must be a number'); + } + if (utils_1.guard.isDefined(length) && typeof length !== 'number') { + throw new TypeError('length must be a number'); + } + if (utils_1.guard.isDefined(hashes) && !utils_1.guard.isStringRecord(hashes)) { + throw new TypeError('hashes must be string keys and values'); + } + return new MetaFile({ + version, + length, + hashes, + unrecognizedFields: rest, + }); + } +} +exports.MetaFile = MetaFile; +// Container for info about a particular target file. +// +// This class is used for Target metadata. +class TargetFile { + constructor(opts) { + validateLength(opts.length); + this.length = opts.length; + this.path = opts.path; + this.hashes = opts.hashes; + this.unrecognizedFields = opts.unrecognizedFields || {}; + } + get custom() { + const custom = this.unrecognizedFields['custom']; + if (!custom || Array.isArray(custom) || !(typeof custom === 'object')) { + return {}; + } + return custom; + } + equals(other) { + if (!(other instanceof TargetFile)) { + return false; + } + return (this.length === other.length && + this.path === other.path && + util_1.default.isDeepStrictEqual(this.hashes, other.hashes) && + util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields)); + } + async verify(stream) { + let observedLength = 0; + // Create a digest for each hash algorithm + const digests = Object.keys(this.hashes).reduce((acc, key) => { + try { + acc[key] = crypto_1.default.createHash(key); + } + catch (e) { + throw new error_1.LengthOrHashMismatchError(`Hash algorithm ${key} not supported`); + } + return acc; + }, {}); + // Read stream chunk by chunk + for await (const chunk of stream) { + // Keep running tally of stream length + observedLength += chunk.length; + // Append chunk to each digest + Object.values(digests).forEach((digest) => { + digest.update(chunk); + }); + } + // Verify length matches expected value + if (observedLength !== this.length) { + throw new error_1.LengthOrHashMismatchError(`Expected length ${this.length} but got ${observedLength}`); + } + // Verify each digest matches expected value + Object.entries(digests).forEach(([key, value]) => { + const expected = this.hashes[key]; + const actual = value.digest('hex'); + if (actual !== expected) { + throw new error_1.LengthOrHashMismatchError(`Expected hash ${expected} but got ${actual}`); + } + }); + } + toJSON() { + return { + length: this.length, + hashes: this.hashes, + ...this.unrecognizedFields, + }; + } + static fromJSON(path, data) { + const { length, hashes, ...rest } = data; + if (typeof length !== 'number') { + throw new TypeError('length must be a number'); + } + if (!utils_1.guard.isStringRecord(hashes)) { + throw new TypeError('hashes must have string keys 
and values'); + } + return new TargetFile({ + length, + path, + hashes, + unrecognizedFields: rest, + }); + } +} +exports.TargetFile = TargetFile; +// Check that supplied length is valid +function validateLength(length) { + if (length < 0) { + throw new error_1.ValueError('Length must be at least 0'); + } +} diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/index.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/index.js new file mode 100644 index 0000000000000..a4dc783659f04 --- /dev/null +++ b/node_modules/sigstore/node_modules/@tufjs/models/dist/index.js @@ -0,0 +1,24 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Timestamp = exports.Targets = exports.Snapshot = exports.Signature = exports.Root = exports.Metadata = exports.Key = exports.TargetFile = exports.MetaFile = exports.ValueError = exports.MetadataKind = void 0; +var base_1 = require("./base"); +Object.defineProperty(exports, "MetadataKind", { enumerable: true, get: function () { return base_1.MetadataKind; } }); +var error_1 = require("./error"); +Object.defineProperty(exports, "ValueError", { enumerable: true, get: function () { return error_1.ValueError; } }); +var file_1 = require("./file"); +Object.defineProperty(exports, "MetaFile", { enumerable: true, get: function () { return file_1.MetaFile; } }); +Object.defineProperty(exports, "TargetFile", { enumerable: true, get: function () { return file_1.TargetFile; } }); +var key_1 = require("./key"); +Object.defineProperty(exports, "Key", { enumerable: true, get: function () { return key_1.Key; } }); +var metadata_1 = require("./metadata"); +Object.defineProperty(exports, "Metadata", { enumerable: true, get: function () { return metadata_1.Metadata; } }); +var root_1 = require("./root"); +Object.defineProperty(exports, "Root", { enumerable: true, get: function () { return root_1.Root; } }); +var signature_1 = require("./signature"); +Object.defineProperty(exports, "Signature", { enumerable: true, get: function () { return signature_1.Signature; } }); +var snapshot_1 = require("./snapshot"); +Object.defineProperty(exports, "Snapshot", { enumerable: true, get: function () { return snapshot_1.Snapshot; } }); +var targets_1 = require("./targets"); +Object.defineProperty(exports, "Targets", { enumerable: true, get: function () { return targets_1.Targets; } }); +var timestamp_1 = require("./timestamp"); +Object.defineProperty(exports, "Timestamp", { enumerable: true, get: function () { return timestamp_1.Timestamp; } }); diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/key.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/key.js new file mode 100644 index 0000000000000..5e55b09d7c6dd --- /dev/null +++ b/node_modules/sigstore/node_modules/@tufjs/models/dist/key.js @@ -0,0 +1,85 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Key = void 0; +const util_1 = __importDefault(require("util")); +const error_1 = require("./error"); +const utils_1 = require("./utils"); +const key_1 = require("./utils/key"); +// A container class representing the public portion of a Key.
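+// In metadata JSON a key record looks like (abbreviated from the root.json
+// above): { "keytype": "ecdsa-sha2-nistp256", "scheme": "ecdsa-sha2-nistp256",
+// "keyval": { "public": "-----BEGIN PUBLIC KEY-----..." } }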
+class Key { + constructor(options) { + const { keyID, keyType, scheme, keyVal, unrecognizedFields } = options; + this.keyID = keyID; + this.keyType = keyType; + this.scheme = scheme; + this.keyVal = keyVal; + this.unrecognizedFields = unrecognizedFields || {}; + } + // Verifies that the metadata.signatures contains a signature made with + // this key and that it is correctly signed. + verifySignature(metadata) { + const signature = metadata.signatures[this.keyID]; + if (!signature) + throw new error_1.UnsignedMetadataError('no signature for key found in metadata'); + if (!this.keyVal.public) + throw new error_1.UnsignedMetadataError('no public key found'); + const publicKey = (0, key_1.getPublicKey)({ + keyType: this.keyType, + scheme: this.scheme, + keyVal: this.keyVal.public, + }); + const signedData = metadata.signed.toJSON(); + try { + if (!utils_1.crypto.verifySignature(signedData, publicKey, signature.sig)) { + throw new error_1.UnsignedMetadataError(`failed to verify ${this.keyID} signature`); + } + } + catch (error) { + if (error instanceof error_1.UnsignedMetadataError) { + throw error; + } + throw new error_1.UnsignedMetadataError(`failed to verify ${this.keyID} signature`); + } + } + equals(other) { + if (!(other instanceof Key)) { + return false; + } + return (this.keyID === other.keyID && + this.keyType === other.keyType && + this.scheme === other.scheme && + util_1.default.isDeepStrictEqual(this.keyVal, other.keyVal) && + util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields)); + } + toJSON() { + return { + keytype: this.keyType, + scheme: this.scheme, + keyval: this.keyVal, + ...this.unrecognizedFields, + }; + } + static fromJSON(keyID, data) { + const { keytype, scheme, keyval, ...rest } = data; + if (typeof keytype !== 'string') { + throw new TypeError('keytype must be a string'); + } + if (typeof scheme !== 'string') { + throw new TypeError('scheme must be a string'); + } + if (!utils_1.guard.isStringRecord(keyval)) { + throw new TypeError('keyval must be a string record'); + } + return new Key({ + keyID, + keyType: keytype, + scheme, + keyVal: keyval, + unrecognizedFields: rest, + }); + } +} +exports.Key = Key; diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/metadata.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/metadata.js new file mode 100644 index 0000000000000..9668b6f14fa70 --- /dev/null +++ b/node_modules/sigstore/node_modules/@tufjs/models/dist/metadata.js @@ -0,0 +1,158 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Metadata = void 0; +const canonical_json_1 = require("@tufjs/canonical-json"); +const util_1 = __importDefault(require("util")); +const base_1 = require("./base"); +const error_1 = require("./error"); +const root_1 = require("./root"); +const signature_1 = require("./signature"); +const snapshot_1 = require("./snapshot"); +const targets_1 = require("./targets"); +const timestamp_1 = require("./timestamp"); +const utils_1 = require("./utils"); +/*** + * A container for signed TUF metadata. + * + * Provides methods to convert to and from JSON and to create and verify + * metadata signatures. + * + * ``Metadata[T]`` is a generic container type where T can be any one type of + * [``Root``, ``Timestamp``, ``Snapshot``, ``Targets``].
The purpose of this + * is to allow static type checking of the signed attribute in code using + * Metadata:: + * + * root_md = Metadata[Root].fromJSON("root.json") + * # root_md type is now Metadata[Root]. This means signed and its + * # attributes like consistent_snapshot are now statically typed and the + * # types can be verified by static type checkers and shown by IDEs + * + * Using a type constraint is not required but not doing so means T is not a + * specific type so static typing cannot happen. Note that the type constraint + * ``[Root]`` is not validated at runtime (as pure annotations are not available + * then). + * + * Apart from ``expires`` all of the arguments to the inner constructors have + * reasonable default values for new metadata. + */ +class Metadata { + constructor(signed, signatures, unrecognizedFields) { + this.signed = signed; + this.signatures = signatures || {}; + this.unrecognizedFields = unrecognizedFields || {}; + } + sign(signer, append = true) { + const bytes = Buffer.from((0, canonical_json_1.canonicalize)(this.signed.toJSON())); + const signature = signer(bytes); + if (!append) { + this.signatures = {}; + } + this.signatures[signature.keyID] = signature; + } + verifyDelegate(delegatedRole, delegatedMetadata) { + let role; + let keys = {}; + switch (this.signed.type) { + case base_1.MetadataKind.Root: + keys = this.signed.keys; + role = this.signed.roles[delegatedRole]; + break; + case base_1.MetadataKind.Targets: + if (!this.signed.delegations) { + throw new error_1.ValueError(`No delegations found for ${delegatedRole}`); + } + keys = this.signed.delegations.keys; + if (this.signed.delegations.roles) { + role = this.signed.delegations.roles[delegatedRole]; + } + else if (this.signed.delegations.succinctRoles) { + if (this.signed.delegations.succinctRoles.isDelegatedRole(delegatedRole)) { + role = this.signed.delegations.succinctRoles; + } + } + break; + default: + throw new TypeError('invalid metadata type'); + } + if (!role) { + throw new error_1.ValueError(`no delegation found for ${delegatedRole}`); + } + const signingKeys = new Set(); + role.keyIDs.forEach((keyID) => { + const key = keys[keyID]; + // If we don't have the key, continue checking other keys + if (!key) { + return; + } + try { + key.verifySignature(delegatedMetadata); + signingKeys.add(key.keyID); + } + catch (error) { + // continue + } + }); + if (signingKeys.size < role.threshold) { + throw new error_1.UnsignedMetadataError(`${delegatedRole} was signed by ${signingKeys.size}/${role.threshold} keys`); + } + } + equals(other) { + if (!(other instanceof Metadata)) { + return false; + } + return (this.signed.equals(other.signed) && + util_1.default.isDeepStrictEqual(this.signatures, other.signatures) && + util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields)); + } + toJSON() { + const signatures = Object.values(this.signatures).map((signature) => { + return signature.toJSON(); + }); + return { + signatures, + signed: this.signed.toJSON(), + ...this.unrecognizedFields, + }; + } + static fromJSON(type, data) { + const { signed, signatures, ...rest } = data; + if (!utils_1.guard.isDefined(signed) || !utils_1.guard.isObject(signed)) { + throw new TypeError('signed is not defined'); + } + if (type !== signed._type) { + throw new error_1.ValueError(`expected '${type}', got ${signed['_type']}`); + } + let signedObj; + switch (type) { + case base_1.MetadataKind.Root: + signedObj = root_1.Root.fromJSON(signed); + break; + case base_1.MetadataKind.Timestamp:
signedObj = timestamp_1.Timestamp.fromJSON(signed); + break; + case base_1.MetadataKind.Snapshot: + signedObj = snapshot_1.Snapshot.fromJSON(signed); + break; + case base_1.MetadataKind.Targets: + signedObj = targets_1.Targets.fromJSON(signed); + break; + default: + throw new TypeError('invalid metadata type'); + } + const sigMap = signaturesFromJSON(signatures); + return new Metadata(signedObj, sigMap, rest); + } +} +exports.Metadata = Metadata; +function signaturesFromJSON(data) { + if (!utils_1.guard.isObjectArray(data)) { + throw new TypeError('signatures is not an array'); + } + return data.reduce((acc, sigData) => { + const signature = signature_1.Signature.fromJSON(sigData); + return { ...acc, [signature.keyID]: signature }; + }, {}); +} diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/role.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/role.js new file mode 100644 index 0000000000000..f7ddbc6fe3f38 --- /dev/null +++ b/node_modules/sigstore/node_modules/@tufjs/models/dist/role.js @@ -0,0 +1,299 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.SuccinctRoles = exports.DelegatedRole = exports.Role = exports.TOP_LEVEL_ROLE_NAMES = void 0; +const crypto_1 = __importDefault(require("crypto")); +const minimatch_1 = require("minimatch"); +const util_1 = __importDefault(require("util")); +const error_1 = require("./error"); +const utils_1 = require("./utils"); +exports.TOP_LEVEL_ROLE_NAMES = [ + 'root', + 'targets', + 'snapshot', + 'timestamp', +]; +/** + * Container that defines which keys are required to sign roles metadata. + * + * Role defines how many keys are required to successfully sign the roles + * metadata, and which keys are accepted. + */ +class Role { + constructor(options) { + const { keyIDs, threshold, unrecognizedFields } = options; + if (hasDuplicates(keyIDs)) { + throw new error_1.ValueError('duplicate key IDs found'); + } + if (threshold < 1) { + throw new error_1.ValueError('threshold must be at least 1'); + } + this.keyIDs = keyIDs; + this.threshold = threshold; + this.unrecognizedFields = unrecognizedFields || {}; + } + equals(other) { + if (!(other instanceof Role)) { + return false; + } + return (this.threshold === other.threshold && + util_1.default.isDeepStrictEqual(this.keyIDs, other.keyIDs) && + util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields)); + } + toJSON() { + return { + keyids: this.keyIDs, + threshold: this.threshold, + ...this.unrecognizedFields, + }; + } + static fromJSON(data) { + const { keyids, threshold, ...rest } = data; + if (!utils_1.guard.isStringArray(keyids)) { + throw new TypeError('keyids must be an array'); + } + if (typeof threshold !== 'number') { + throw new TypeError('threshold must be a number'); + } + return new Role({ + keyIDs: keyids, + threshold, + unrecognizedFields: rest, + }); + } +} +exports.Role = Role; +function hasDuplicates(array) { + return new Set(array).size !== array.length; +} +/** + * A container with information about a delegated role. 
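+ * (For example, a role delegated with paths: ['files/*.txt'] is trusted
+ * only for target files matching that pattern.)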
+ * + * A delegation can happen in two ways: + * - ``paths`` is set: delegates targets matching any path pattern in ``paths`` + * - ``pathHashPrefixes`` is set: delegates targets whose target path hash + * starts with any of the prefixes in ``pathHashPrefixes`` + * + * ``paths`` and ``pathHashPrefixes`` are mutually exclusive: both cannot be + * set, at least one of them must be set. + */ +class DelegatedRole extends Role { + constructor(opts) { + super(opts); + const { name, terminating, paths, pathHashPrefixes } = opts; + this.name = name; + this.terminating = terminating; + if (opts.paths && opts.pathHashPrefixes) { + throw new error_1.ValueError('paths and pathHashPrefixes are mutually exclusive'); + } + this.paths = paths; + this.pathHashPrefixes = pathHashPrefixes; + } + equals(other) { + if (!(other instanceof DelegatedRole)) { + return false; + } + return (super.equals(other) && + this.name === other.name && + this.terminating === other.terminating && + util_1.default.isDeepStrictEqual(this.paths, other.paths) && + util_1.default.isDeepStrictEqual(this.pathHashPrefixes, other.pathHashPrefixes)); + } + isDelegatedPath(targetFilepath) { + if (this.paths) { + return this.paths.some((pathPattern) => isTargetInPathPattern(targetFilepath, pathPattern)); + } + if (this.pathHashPrefixes) { + const hasher = crypto_1.default.createHash('sha256'); + const pathHash = hasher.update(targetFilepath).digest('hex'); + return this.pathHashPrefixes.some((pathHashPrefix) => pathHash.startsWith(pathHashPrefix)); + } + return false; + } + toJSON() { + const json = { + ...super.toJSON(), + name: this.name, + terminating: this.terminating, + }; + if (this.paths) { + json.paths = this.paths; + } + if (this.pathHashPrefixes) { + json.path_hash_prefixes = this.pathHashPrefixes; + } + return json; + } + static fromJSON(data) { + const { keyids, threshold, name, terminating, paths, path_hash_prefixes, ...rest } = data; + if (!utils_1.guard.isStringArray(keyids)) { + throw new TypeError('keyids must be an array of strings'); + } + if (typeof threshold !== 'number') { + throw new TypeError('threshold must be a number'); + } + if (typeof name !== 'string') { + throw new TypeError('name must be a string'); + } + if (typeof terminating !== 'boolean') { + throw new TypeError('terminating must be a boolean'); + } + if (utils_1.guard.isDefined(paths) && !utils_1.guard.isStringArray(paths)) { + throw new TypeError('paths must be an array of strings'); + } + if (utils_1.guard.isDefined(path_hash_prefixes) && + !utils_1.guard.isStringArray(path_hash_prefixes)) { + throw new TypeError('path_hash_prefixes must be an array of strings'); + } + return new DelegatedRole({ + keyIDs: keyids, + threshold, + name, + terminating, + paths, + pathHashPrefixes: path_hash_prefixes, + unrecognizedFields: rest, + }); + } +} +exports.DelegatedRole = DelegatedRole; +// JS version of Ruby's Array#zip +const zip = (a, b) => a.map((k, i) => [k, b[i]]); +function isTargetInPathPattern(target, pattern) { + const targetParts = target.split('/'); + const patternParts = pattern.split('/'); + if (patternParts.length != targetParts.length) { + return false; + } + return zip(targetParts, patternParts).every(([targetPart, patternPart]) => (0, minimatch_1.minimatch)(targetPart, patternPart)); +} +/** + * Succinctly defines a hash bin delegation graph. + * + * A ``SuccinctRoles`` object describes a delegation graph that covers all + * targets, distributing them uniformly over the delegated roles (i.e. bins) + * in the graph. 
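+ * (For instance, bitLength 8 and namePrefix 'bin' describe 256 bins named
+ * 'bin-00' through 'bin-ff'.)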
+ * + * The total number of bins is 2 to the power of the passed ``bit_length``. + * + * Bin names are the concatenation of the passed ``name_prefix`` and a + * zero-padded hex representation of the bin index separated by a hyphen. + * + * The passed ``keyids`` and ``threshold`` are used for each bin, and each bin + * is 'terminating'. + * + * For details: https://github.com/theupdateframework/taps/blob/master/tap15.md + */ +class SuccinctRoles extends Role { + constructor(opts) { + super(opts); + const { bitLength, namePrefix } = opts; + if (bitLength <= 0 || bitLength > 32) { + throw new error_1.ValueError('bitLength must be between 1 and 32'); + } + this.bitLength = bitLength; + this.namePrefix = namePrefix; + // Calculate the suffix_len value based on the total number of bins in + // hex. If bit_length = 10 then number_of_bins = 1024 and bin names will + // have a suffix between "000" and "3ff" in hex and suffix_len will be 3 + // meaning the third bin will have a suffix of "003". + this.numberOfBins = Math.pow(2, bitLength); + // suffix_len is calculated based on "number_of_bins - 1" as the name + // of the last bin contains the number "number_of_bins -1" as a suffix. + this.suffixLen = (this.numberOfBins - 1).toString(16).length; + } + equals(other) { + if (!(other instanceof SuccinctRoles)) { + return false; + } + return (super.equals(other) && + this.bitLength === other.bitLength && + this.namePrefix === other.namePrefix); + } + /*** + * Calculates the name of the delegated role responsible for 'target_filepath'. + * + * The target at path 'target_filepath' is assigned to a bin by casting + * the left-most 'bit_length' of bits of the file path hash digest to + * int, using it as bin index between 0 and '2**bit_length - 1'. + * + * Args: + * target_filepath: URL path to a target file, relative to a base + * targets URL. + */ + getRoleForTarget(targetFilepath) { + const hasher = crypto_1.default.createHash('sha256'); + const hasherBuffer = hasher.update(targetFilepath).digest(); + // can't ever need more than 4 bytes (32 bits). + const hashBytes = hasherBuffer.subarray(0, 4); + // Right shift hash bytes, so that we only have the leftmost + // bit_length bits that we care about. + const shiftValue = 32 - this.bitLength; + const binNumber = hashBytes.readUInt32BE() >>> shiftValue; + // Add zero padding if necessary and cast to hex the suffix. + const suffix = binNumber.toString(16).padStart(this.suffixLen, '0'); + return `${this.namePrefix}-${suffix}`; + } + *getRoles() { + for (let i = 0; i < this.numberOfBins; i++) { + const suffix = i.toString(16).padStart(this.suffixLen, '0'); + yield `${this.namePrefix}-${suffix}`; + } + } + /*** + * Determines whether the given ``role_name`` is in one of + * the delegated roles that ``SuccinctRoles`` represents. + * + * Args: + * role_name: The name of the role to check against.
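+ * (e.g. with namePrefix 'bin' and bitLength 8, 'bin-0a' is delegated
+ * while 'abc-0a' and 'bin-100' are not)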
+ */ + isDelegatedRole(roleName) { + const desiredPrefix = this.namePrefix + '-'; + if (!roleName.startsWith(desiredPrefix)) { + return false; + } + const suffix = roleName.slice(desiredPrefix.length, roleName.length); + if (suffix.length != this.suffixLen) { + return false; + } + // make sure the suffix is a hex string + if (!suffix.match(/^[0-9a-fA-F]+$/)) { + return false; + } + const num = parseInt(suffix, 16); + return 0 <= num && num < this.numberOfBins; + } + toJSON() { + const json = { + ...super.toJSON(), + bit_length: this.bitLength, + name_prefix: this.namePrefix, + }; + return json; + } + static fromJSON(data) { + const { keyids, threshold, bit_length, name_prefix, ...rest } = data; + if (!utils_1.guard.isStringArray(keyids)) { + throw new TypeError('keyids must be an array of strings'); + } + if (typeof threshold !== 'number') { + throw new TypeError('threshold must be a number'); + } + if (typeof bit_length !== 'number') { + throw new TypeError('bit_length must be a number'); + } + if (typeof name_prefix !== 'string') { + throw new TypeError('name_prefix must be a string'); + } + return new SuccinctRoles({ + keyIDs: keyids, + threshold, + bitLength: bit_length, + namePrefix: name_prefix, + unrecognizedFields: rest, + }); + } +} +exports.SuccinctRoles = SuccinctRoles; diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/root.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/root.js new file mode 100644 index 0000000000000..36d0ef0f186d1 --- /dev/null +++ b/node_modules/sigstore/node_modules/@tufjs/models/dist/root.js @@ -0,0 +1,116 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Root = void 0; +const util_1 = __importDefault(require("util")); +const base_1 = require("./base"); +const error_1 = require("./error"); +const key_1 = require("./key"); +const role_1 = require("./role"); +const utils_1 = require("./utils"); +/** + * A container for the signed part of root metadata. + * + * The top-level role and metadata file signed by the root keys. + * This role specifies trusted keys for all other top-level roles, which may further delegate trust. + */ +class Root extends base_1.Signed { + constructor(options) { + super(options); + this.type = base_1.MetadataKind.Root; + this.keys = options.keys || {}; + this.consistentSnapshot = options.consistentSnapshot ?? 
true; + if (!options.roles) { + this.roles = role_1.TOP_LEVEL_ROLE_NAMES.reduce((acc, role) => ({ + ...acc, + [role]: new role_1.Role({ keyIDs: [], threshold: 1 }), + }), {}); + } + else { + const roleNames = new Set(Object.keys(options.roles)); + if (!role_1.TOP_LEVEL_ROLE_NAMES.every((role) => roleNames.has(role))) { + throw new error_1.ValueError('missing top-level role'); + } + this.roles = options.roles; + } + } + addKey(key, role) { + if (!this.roles[role]) { + throw new error_1.ValueError(`role ${role} does not exist`); + } + if (!this.roles[role].keyIDs.includes(key.keyID)) { + this.roles[role].keyIDs.push(key.keyID); + } + this.keys[key.keyID] = key; + } + equals(other) { + if (!(other instanceof Root)) { + return false; + } + return (super.equals(other) && + this.consistentSnapshot === other.consistentSnapshot && + util_1.default.isDeepStrictEqual(this.keys, other.keys) && + util_1.default.isDeepStrictEqual(this.roles, other.roles)); + } + toJSON() { + return { + _type: this.type, + spec_version: this.specVersion, + version: this.version, + expires: this.expires, + keys: keysToJSON(this.keys), + roles: rolesToJSON(this.roles), + consistent_snapshot: this.consistentSnapshot, + ...this.unrecognizedFields, + }; + } + static fromJSON(data) { + const { unrecognizedFields, ...commonFields } = base_1.Signed.commonFieldsFromJSON(data); + const { keys, roles, consistent_snapshot, ...rest } = unrecognizedFields; + if (typeof consistent_snapshot !== 'boolean') { + throw new TypeError('consistent_snapshot must be a boolean'); + } + return new Root({ + ...commonFields, + keys: keysFromJSON(keys), + roles: rolesFromJSON(roles), + consistentSnapshot: consistent_snapshot, + unrecognizedFields: rest, + }); + } +} +exports.Root = Root; +function keysToJSON(keys) { + return Object.entries(keys).reduce((acc, [keyID, key]) => ({ ...acc, [keyID]: key.toJSON() }), {}); +} +function rolesToJSON(roles) { + return Object.entries(roles).reduce((acc, [roleName, role]) => ({ ...acc, [roleName]: role.toJSON() }), {}); +} +function keysFromJSON(data) { + let keys; + if (utils_1.guard.isDefined(data)) { + if (!utils_1.guard.isObjectRecord(data)) { + throw new TypeError('keys must be an object'); + } + keys = Object.entries(data).reduce((acc, [keyID, keyData]) => ({ + ...acc, + [keyID]: key_1.Key.fromJSON(keyID, keyData), + }), {}); + } + return keys; +} +function rolesFromJSON(data) { + let roles; + if (utils_1.guard.isDefined(data)) { + if (!utils_1.guard.isObjectRecord(data)) { + throw new TypeError('roles must be an object'); + } + roles = Object.entries(data).reduce((acc, [roleName, roleData]) => ({ + ...acc, + [roleName]: role_1.Role.fromJSON(roleData), + }), {}); + } + return roles; +} diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/signature.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/signature.js new file mode 100644 index 0000000000000..33eb204eb0835 --- /dev/null +++ b/node_modules/sigstore/node_modules/@tufjs/models/dist/signature.js @@ -0,0 +1,38 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Signature = void 0; +/** + * A container class containing information about a signature. + * + * Contains a signature and the keyid uniquely identifying the key used + * to generate the signature. + * + * Provide a `fromJSON` method to create a Signature from a JSON object. 
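+ * (e.g. Signature.fromJSON({ keyid: 'abc123', sig: '3046...' }) yields a
+ * Signature with keyID 'abc123' and sig '3046...')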
+ */ +class Signature { + constructor(options) { + const { keyID, sig } = options; + this.keyID = keyID; + this.sig = sig; + } + toJSON() { + return { + keyid: this.keyID, + sig: this.sig, + }; + } + static fromJSON(data) { + const { keyid, sig } = data; + if (typeof keyid !== 'string') { + throw new TypeError('keyid must be a string'); + } + if (typeof sig !== 'string') { + throw new TypeError('sig must be a string'); + } + return new Signature({ + keyID: keyid, + sig: sig, + }); + } +} +exports.Signature = Signature; diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/snapshot.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/snapshot.js new file mode 100644 index 0000000000000..e90ea8e729e4e --- /dev/null +++ b/node_modules/sigstore/node_modules/@tufjs/models/dist/snapshot.js @@ -0,0 +1,71 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Snapshot = void 0; +const util_1 = __importDefault(require("util")); +const base_1 = require("./base"); +const file_1 = require("./file"); +const utils_1 = require("./utils"); +/** + * A container for the signed part of snapshot metadata. + * + * Snapshot contains information about all target Metadata files. + * A top-level role that specifies the latest versions of all targets metadata files, + * and hence the latest versions of all targets (including any dependencies between them) on the repository. + */ +class Snapshot extends base_1.Signed { + constructor(opts) { + super(opts); + this.type = base_1.MetadataKind.Snapshot; + this.meta = opts.meta || { 'targets.json': new file_1.MetaFile({ version: 1 }) }; + } + equals(other) { + if (!(other instanceof Snapshot)) { + return false; + } + return super.equals(other) && util_1.default.isDeepStrictEqual(this.meta, other.meta); + } + toJSON() { + return { + _type: this.type, + meta: metaToJSON(this.meta), + spec_version: this.specVersion, + version: this.version, + expires: this.expires, + ...this.unrecognizedFields, + }; + } + static fromJSON(data) { + const { unrecognizedFields, ...commonFields } = base_1.Signed.commonFieldsFromJSON(data); + const { meta, ...rest } = unrecognizedFields; + return new Snapshot({ + ...commonFields, + meta: metaFromJSON(meta), + unrecognizedFields: rest, + }); + } +} +exports.Snapshot = Snapshot; +function metaToJSON(meta) { + return Object.entries(meta).reduce((acc, [path, metadata]) => ({ + ...acc, + [path]: metadata.toJSON(), + }), {}); +} +function metaFromJSON(data) { + let meta; + if (utils_1.guard.isDefined(data)) { + if (!utils_1.guard.isObjectRecord(data)) { + throw new TypeError('meta field is malformed'); + } + else { + meta = Object.entries(data).reduce((acc, [path, metadata]) => ({ + ...acc, + [path]: file_1.MetaFile.fromJSON(metadata), + }), {}); + } + } + return meta; +} diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/targets.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/targets.js new file mode 100644 index 0000000000000..54bd8f8c554af --- /dev/null +++ b/node_modules/sigstore/node_modules/@tufjs/models/dist/targets.js @@ -0,0 +1,92 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Targets = void 0; +const util_1 = __importDefault(require("util")); +const base_1 = require("./base"); +const delegations_1 = require("./delegations"); +const file_1 = require("./file"); +const utils_1 = require("./utils"); +// Container for the signed part of targets metadata. +// +// Targets contains verifying information about target files and also delegates +// responsibility to other Targets roles. +class Targets extends base_1.Signed { + constructor(options) { + super(options); + this.type = base_1.MetadataKind.Targets; + this.targets = options.targets || {}; + this.delegations = options.delegations; + } + addTarget(target) { + this.targets[target.path] = target; + } + equals(other) { + if (!(other instanceof Targets)) { + return false; + } + return (super.equals(other) && + util_1.default.isDeepStrictEqual(this.targets, other.targets) && + util_1.default.isDeepStrictEqual(this.delegations, other.delegations)); + } + toJSON() { + const json = { + _type: this.type, + spec_version: this.specVersion, + version: this.version, + expires: this.expires, + targets: targetsToJSON(this.targets), + ...this.unrecognizedFields, + }; + if (this.delegations) { + json.delegations = this.delegations.toJSON(); + } + return json; + } + static fromJSON(data) { + const { unrecognizedFields, ...commonFields } = base_1.Signed.commonFieldsFromJSON(data); + const { targets, delegations, ...rest } = unrecognizedFields; + return new Targets({ + ...commonFields, + targets: targetsFromJSON(targets), + delegations: delegationsFromJSON(delegations), + unrecognizedFields: rest, + }); + } +} +exports.Targets = Targets; +function targetsToJSON(targets) { + return Object.entries(targets).reduce((acc, [path, target]) => ({ + ...acc, + [path]: target.toJSON(), + }), {}); +} +function targetsFromJSON(data) { + let targets; + if (utils_1.guard.isDefined(data)) { + if (!utils_1.guard.isObjectRecord(data)) { + throw new TypeError('targets must be an object'); + } + else { + targets = Object.entries(data).reduce((acc, [path, target]) => ({ + ...acc, + [path]: file_1.TargetFile.fromJSON(path, target), + }), {}); + } + } + return targets; +} +function delegationsFromJSON(data) { + let delegations; + if (utils_1.guard.isDefined(data)) { + if (!utils_1.guard.isObject(data)) { + throw new TypeError('delegations must be an object'); + } + else { + delegations = delegations_1.Delegations.fromJSON(data); + } + } + return delegations; +} diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/timestamp.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/timestamp.js new file mode 100644 index 0000000000000..9880c4c9fc254 --- /dev/null +++ b/node_modules/sigstore/node_modules/@tufjs/models/dist/timestamp.js @@ -0,0 +1,58 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Timestamp = void 0; +const base_1 = require("./base"); +const file_1 = require("./file"); +const utils_1 = require("./utils"); +/** + * A container for the signed part of timestamp metadata. + * + * A top-level role that specifies the latest version of the snapshot role metadata file, + * and hence the latest versions of all metadata and targets on the repository.
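+ * In serialized form the snapshot pin appears as
+ * meta: { 'snapshot.json': { version: N, ... } }.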
+ */ +class Timestamp extends base_1.Signed { + constructor(options) { + super(options); + this.type = base_1.MetadataKind.Timestamp; + this.snapshotMeta = options.snapshotMeta || new file_1.MetaFile({ version: 1 }); + } + equals(other) { + if (!(other instanceof Timestamp)) { + return false; + } + return super.equals(other) && this.snapshotMeta.equals(other.snapshotMeta); + } + toJSON() { + return { + _type: this.type, + spec_version: this.specVersion, + version: this.version, + expires: this.expires, + meta: { 'snapshot.json': this.snapshotMeta.toJSON() }, + ...this.unrecognizedFields, + }; + } + static fromJSON(data) { + const { unrecognizedFields, ...commonFields } = base_1.Signed.commonFieldsFromJSON(data); + const { meta, ...rest } = unrecognizedFields; + return new Timestamp({ + ...commonFields, + snapshotMeta: snapshotMetaFromJSON(meta), + unrecognizedFields: rest, + }); + } +} +exports.Timestamp = Timestamp; +function snapshotMetaFromJSON(data) { + let snapshotMeta; + if (utils_1.guard.isDefined(data)) { + const snapshotData = data['snapshot.json']; + if (!utils_1.guard.isDefined(snapshotData) || !utils_1.guard.isObject(snapshotData)) { + throw new TypeError('missing snapshot.json in meta'); + } + else { + snapshotMeta = file_1.MetaFile.fromJSON(snapshotData); + } + } + return snapshotMeta; +} diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/guard.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/guard.js new file mode 100644 index 0000000000000..efe558852303c --- /dev/null +++ b/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/guard.js @@ -0,0 +1,33 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isObjectRecord = exports.isStringRecord = exports.isObjectArray = exports.isStringArray = exports.isObject = exports.isDefined = void 0; +function isDefined(val) { + return val !== undefined; +} +exports.isDefined = isDefined; +function isObject(value) { + return typeof value === 'object' && value !== null; +} +exports.isObject = isObject; +function isStringArray(value) { + return Array.isArray(value) && value.every((v) => typeof v === 'string'); +} +exports.isStringArray = isStringArray; +function isObjectArray(value) { + return Array.isArray(value) && value.every(isObject); +} +exports.isObjectArray = isObjectArray; +function isStringRecord(value) { + return (typeof value === 'object' && + value !== null && + Object.keys(value).every((k) => typeof k === 'string') && + Object.values(value).every((v) => typeof v === 'string')); +} +exports.isStringRecord = isStringRecord; +function isObjectRecord(value) { + return (typeof value === 'object' && + value !== null && + Object.keys(value).every((k) => typeof k === 'string') && + Object.values(value).every((v) => typeof v === 'object' && v !== null)); +} +exports.isObjectRecord = isObjectRecord; diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/index.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/index.js new file mode 100644 index 0000000000000..872aae28049c9 --- /dev/null +++ b/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/index.js @@ -0,0 +1,28 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.crypto = exports.guard = void 0; +exports.guard = __importStar(require("./guard")); +exports.crypto = __importStar(require("./verify")); diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/key.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/key.js new file mode 100644 index 0000000000000..1f795ba1a2733 --- /dev/null +++ b/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/key.js @@ -0,0 +1,143 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getPublicKey = void 0; +const crypto_1 = __importDefault(require("crypto")); +const error_1 = require("../error"); +const oid_1 = require("./oid"); +const ASN1_TAG_SEQUENCE = 0x30; +const ANS1_TAG_BIT_STRING = 0x03; +const NULL_BYTE = 0x00; +const OID_EDDSA = '1.3.101.112'; +const OID_EC_PUBLIC_KEY = '1.2.840.10045.2.1'; +const OID_EC_CURVE_P256V1 = '1.2.840.10045.3.1.7'; +const PEM_HEADER = '-----BEGIN PUBLIC KEY-----'; +function getPublicKey(keyInfo) { + switch (keyInfo.keyType) { + case 'rsa': + return getRSAPublicKey(keyInfo); + case 'ed25519': + return getED25519PublicKey(keyInfo); + case 'ecdsa': + case 'ecdsa-sha2-nistp256': + case 'ecdsa-sha2-nistp384': + return getECDCSAPublicKey(keyInfo); + default: + throw new error_1.UnsupportedAlgorithmError(`Unsupported key type: ${keyInfo.keyType}`); + } +} +exports.getPublicKey = getPublicKey; +function getRSAPublicKey(keyInfo) { + // Only support PEM-encoded RSA keys + if (!keyInfo.keyVal.startsWith(PEM_HEADER)) { + throw new error_1.CryptoError('Invalid key format'); + } + const key = crypto_1.default.createPublicKey(keyInfo.keyVal); + switch (keyInfo.scheme) { + case 'rsassa-pss-sha256': + return { + key: key, + padding: crypto_1.default.constants.RSA_PKCS1_PSS_PADDING, + }; + default: + throw new error_1.UnsupportedAlgorithmError(`Unsupported RSA scheme: ${keyInfo.scheme}`); + } +} +function getED25519PublicKey(keyInfo) { + let key; + // If key is already PEM-encoded we can just parse it + if (keyInfo.keyVal.startsWith(PEM_HEADER)) { + key = crypto_1.default.createPublicKey(keyInfo.keyVal); + } + else { + // If key is not PEM-encoded it had better be hex + if (!isHex(keyInfo.keyVal)) { + throw new error_1.CryptoError('Invalid key format'); + } + key = crypto_1.default.createPublicKey({ + key: ed25519.hexToDER(keyInfo.keyVal), + format: 'der', + type: 'spki', + }); + } + return { key }; +} +function getECDCSAPublicKey(keyInfo) { + let key; + // If key is already PEM-encoded we can just parse it + if 
(keyInfo.keyVal.startsWith(PEM_HEADER)) { + key = crypto_1.default.createPublicKey(keyInfo.keyVal); + } + else { + // If key is not PEM-encoded it had better be hex + if (!isHex(keyInfo.keyVal)) { + throw new error_1.CryptoError('Invalid key format'); + } + key = crypto_1.default.createPublicKey({ + key: ecdsa.hexToDER(keyInfo.keyVal), + format: 'der', + type: 'spki', + }); + } + return { key }; +} +const ed25519 = { + // Translates a hex key into a crypto KeyObject + // https://keygen.sh/blog/how-to-use-hexadecimal-ed25519-keys-in-node/ + hexToDER: (hex) => { + const key = Buffer.from(hex, 'hex'); + const oid = (0, oid_1.encodeOIDString)(OID_EDDSA); + // Create a byte sequence containing the OID and key + const elements = Buffer.concat([ + Buffer.concat([ + Buffer.from([ASN1_TAG_SEQUENCE]), + Buffer.from([oid.length]), + oid, + ]), + Buffer.concat([ + Buffer.from([ANS1_TAG_BIT_STRING]), + Buffer.from([key.length + 1]), + Buffer.from([NULL_BYTE]), + key, + ]), + ]); + // Wrap up by creating a sequence of elements + const der = Buffer.concat([ + Buffer.from([ASN1_TAG_SEQUENCE]), + Buffer.from([elements.length]), + elements, + ]); + return der; + }, +}; +const ecdsa = { + hexToDER: (hex) => { + const key = Buffer.from(hex, 'hex'); + const bitString = Buffer.concat([ + Buffer.from([ANS1_TAG_BIT_STRING]), + Buffer.from([key.length + 1]), + Buffer.from([NULL_BYTE]), + key, + ]); + const oids = Buffer.concat([ + (0, oid_1.encodeOIDString)(OID_EC_PUBLIC_KEY), + (0, oid_1.encodeOIDString)(OID_EC_CURVE_P256V1), + ]); + const oidSequence = Buffer.concat([ + Buffer.from([ASN1_TAG_SEQUENCE]), + Buffer.from([oids.length]), + oids, + ]); + // Wrap up by creating a sequence of elements + const der = Buffer.concat([ + Buffer.from([ASN1_TAG_SEQUENCE]), + Buffer.from([oidSequence.length + bitString.length]), + oidSequence, + bitString, + ]); + return der; + }, +}; +const isHex = (key) => /^[0-9a-fA-F]+$/.test(key); diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/oid.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/oid.js new file mode 100644 index 0000000000000..e1bb7af5e54fb --- /dev/null +++ b/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/oid.js @@ -0,0 +1,27 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.encodeOIDString = void 0; +const ANS1_TAG_OID = 0x06; +function encodeOIDString(oid) { + const parts = oid.split('.'); + // The first two subidentifiers are encoded into the first byte + const first = parseInt(parts[0], 10) * 40 + parseInt(parts[1], 10); + const rest = []; + parts.slice(2).forEach((part) => { + const bytes = encodeVariableLengthInteger(parseInt(part, 10)); + rest.push(...bytes); + }); + const der = Buffer.from([first, ...rest]); + return Buffer.from([ANS1_TAG_OID, der.length, ...der]); +} +exports.encodeOIDString = encodeOIDString; +function encodeVariableLengthInteger(value) { + const bytes = []; + let mask = 0x00; + while (value > 0) { + bytes.unshift((value & 0x7f) | mask); + value >>= 7; + mask = 0x80; + } + return bytes; +} diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/types.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/types.js new file mode 100644 index 0000000000000..c8ad2e549bdc6 --- /dev/null +++ b/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/types.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git 
a/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/verify.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/verify.js new file mode 100644 index 0000000000000..8232b6f6a97ab --- /dev/null +++ b/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/verify.js @@ -0,0 +1,13 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.verifySignature = void 0; +const canonical_json_1 = require("@tufjs/canonical-json"); +const crypto_1 = __importDefault(require("crypto")); +const verifySignature = (metaDataSignedData, key, signature) => { + const canonicalData = Buffer.from((0, canonical_json_1.canonicalize)(metaDataSignedData)); + return crypto_1.default.verify(undefined, canonicalData, key, Buffer.from(signature, 'hex')); +}; +exports.verifySignature = verifySignature; diff --git a/node_modules/sigstore/node_modules/@tufjs/models/package.json b/node_modules/sigstore/node_modules/@tufjs/models/package.json new file mode 100644 index 0000000000000..6711ee0dababc --- /dev/null +++ b/node_modules/sigstore/node_modules/@tufjs/models/package.json @@ -0,0 +1,41 @@ +{ + "name": "@tufjs/models", + "version": "1.0.4", + "description": "TUF metadata models", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "files": [ + "dist" + ], + "scripts": { + "build": "tsc --build", + "clean": "rm -rf dist && rm tsconfig.tsbuildinfo", + "test": "jest" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/theupdateframework/tuf-js.git" + }, + "keywords": [ + "tuf", + "security", + "update" + ], + "author": "bdehamer@github.com", + "license": "MIT", + "bugs": { + "url": "https://github.com/theupdateframework/tuf-js/issues" + }, + "homepage": "https://github.com/theupdateframework/tuf-js/tree/main/packages/models#readme", + "devDependencies": { + "@types/node": "^18.16.3", + "typescript": "^5.0.4" + }, + "dependencies": { + "@tufjs/canonical-json": "1.0.0", + "minimatch": "^9.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } +} diff --git a/node_modules/sigstore/node_modules/tuf-js/LICENSE b/node_modules/sigstore/node_modules/tuf-js/LICENSE new file mode 100644 index 0000000000000..420700f5d3765 --- /dev/null +++ b/node_modules/sigstore/node_modules/tuf-js/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2022 GitHub and the TUF Contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
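A note on the verifySignature helper added in dist/utils/verify.js above: TUF signatures cover the canonical JSON form of the signed portion, so callers canonicalize before handing bytes to Node's crypto. Below is a minimal sketch of that round trip, assuming a locally generated ed25519 key pair; the metadata object and variable names are illustrative, not part of this change.

const crypto = require('crypto')
const { canonicalize } = require('@tufjs/canonical-json')

const { publicKey, privateKey } = crypto.generateKeyPairSync('ed25519')
const signed = { _type: 'timestamp', spec_version: '1.0.0', version: 1 }

// Sign the canonicalized bytes; ed25519 takes no digest algorithm (null).
const data = Buffer.from(canonicalize(signed))
const sig = crypto.sign(null, data, privateKey).toString('hex')

// Equivalent to verifySignature(signed, publicKey, sig) above: verify the
// hex-encoded signature against the same canonicalized bytes.
console.log(crypto.verify(null, data, publicKey, Buffer.from(sig, 'hex'))) // true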
diff --git a/node_modules/sigstore/node_modules/tuf-js/dist/config.js b/node_modules/sigstore/node_modules/tuf-js/dist/config.js new file mode 100644 index 0000000000000..c2d970e256244 --- /dev/null +++ b/node_modules/sigstore/node_modules/tuf-js/dist/config.js @@ -0,0 +1,14 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.defaultConfig = void 0; +exports.defaultConfig = { + maxRootRotations: 32, + maxDelegations: 32, + rootMaxLength: 512000, + timestampMaxLength: 16384, + snapshotMaxLength: 2000000, + targetsMaxLength: 5000000, + prefixTargetsWithHash: true, + fetchTimeout: 100000, + fetchRetries: 2, +}; diff --git a/node_modules/sigstore/node_modules/tuf-js/dist/error.js b/node_modules/sigstore/node_modules/tuf-js/dist/error.js new file mode 100644 index 0000000000000..f4b10fa202895 --- /dev/null +++ b/node_modules/sigstore/node_modules/tuf-js/dist/error.js @@ -0,0 +1,48 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.DownloadHTTPError = exports.DownloadLengthMismatchError = exports.DownloadError = exports.ExpiredMetadataError = exports.EqualVersionError = exports.BadVersionError = exports.RepositoryError = exports.PersistError = exports.RuntimeError = exports.ValueError = void 0; +// An error about insufficient values +class ValueError extends Error { +} +exports.ValueError = ValueError; +class RuntimeError extends Error { +} +exports.RuntimeError = RuntimeError; +class PersistError extends Error { +} +exports.PersistError = PersistError; +// An error with a repository's state, such as a missing file. +// It covers all exceptions that come from the repository side when +// looking from the perspective of users of metadata API or ngclient. +class RepositoryError extends Error { +} +exports.RepositoryError = RepositoryError; +// An error for metadata that contains an invalid version number. +class BadVersionError extends RepositoryError { +} +exports.BadVersionError = BadVersionError; +// An error for metadata containing a previously verified version number. +class EqualVersionError extends BadVersionError { +} +exports.EqualVersionError = EqualVersionError; +// Indicate that a TUF Metadata file has expired. +class ExpiredMetadataError extends RepositoryError { +} +exports.ExpiredMetadataError = ExpiredMetadataError; +//----- Download Errors ------------------------------------------------------- +// An error occurred while attempting to download a file. +class DownloadError extends Error { +} +exports.DownloadError = DownloadError; +// Indicate that a mismatch of lengths was seen while downloading a file +class DownloadLengthMismatchError extends DownloadError { +} +exports.DownloadLengthMismatchError = DownloadLengthMismatchError; +// Returned by FetcherInterface implementations for HTTP errors. +class DownloadHTTPError extends DownloadError { + constructor(message, statusCode) { + super(message); + this.statusCode = statusCode; + } +} +exports.DownloadHTTPError = DownloadHTTPError; diff --git a/node_modules/sigstore/node_modules/tuf-js/dist/fetcher.js b/node_modules/sigstore/node_modules/tuf-js/dist/fetcher.js new file mode 100644 index 0000000000000..d3dcf53eeb869 --- /dev/null +++ b/node_modules/sigstore/node_modules/tuf-js/dist/fetcher.js @@ -0,0 +1,84 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.DefaultFetcher = exports.BaseFetcher = void 0; +const debug_1 = __importDefault(require("debug")); +const fs_1 = __importDefault(require("fs")); +const make_fetch_happen_1 = __importDefault(require("make-fetch-happen")); +const util_1 = __importDefault(require("util")); +const error_1 = require("./error"); +const tmpfile_1 = require("./utils/tmpfile"); +const log = (0, debug_1.default)('tuf:fetch'); +class BaseFetcher { + // Download file from given URL. The file is downloaded to a temporary + // location and then passed to the given handler. The handler is responsible + // for moving the file to its final location. The temporary file is deleted + // after the handler returns. + async downloadFile(url, maxLength, handler) { + return (0, tmpfile_1.withTempFile)(async (tmpFile) => { + const reader = await this.fetch(url); + let numberOfBytesReceived = 0; + const fileStream = fs_1.default.createWriteStream(tmpFile); + // Read the stream a chunk at a time so that we can check + // the length of the file as we go + try { + for await (const chunk of reader) { + const bufferChunk = Buffer.from(chunk); + numberOfBytesReceived += bufferChunk.length; + if (numberOfBytesReceived > maxLength) { + throw new error_1.DownloadLengthMismatchError('Max length reached'); + } + await writeBufferToStream(fileStream, bufferChunk); + } + } + finally { + // Make sure we always close the stream + await util_1.default.promisify(fileStream.close).bind(fileStream)(); + } + return handler(tmpFile); + }); + } + // Download bytes from given URL. + async downloadBytes(url, maxLength) { + return this.downloadFile(url, maxLength, async (file) => { + const stream = fs_1.default.createReadStream(file); + const chunks = []; + for await (const chunk of stream) { + chunks.push(chunk); + } + return Buffer.concat(chunks); + }); + } +} +exports.BaseFetcher = BaseFetcher; +class DefaultFetcher extends BaseFetcher { + constructor(options = {}) { + super(); + this.timeout = options.timeout; + this.retries = options.retries; + } + async fetch(url) { + log('GET %s', url); + const response = await (0, make_fetch_happen_1.default)(url, { + timeout: this.timeout, + retry: this.retries, + }); + if (!response.ok || !response?.body) { + throw new error_1.DownloadHTTPError('Failed to download', response.status); + } + return response.body; + } +} +exports.DefaultFetcher = DefaultFetcher; +const writeBufferToStream = async (stream, buffer) => { + return new Promise((resolve, reject) => { + stream.write(buffer, (err) => { + if (err) { + reject(err); + } + resolve(true); + }); + }); +}; diff --git a/node_modules/sigstore/node_modules/tuf-js/dist/index.js b/node_modules/sigstore/node_modules/tuf-js/dist/index.js new file mode 100644 index 0000000000000..5a83b91f355d8 --- /dev/null +++ b/node_modules/sigstore/node_modules/tuf-js/dist/index.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Updater = exports.BaseFetcher = exports.TargetFile = void 0; +var models_1 = require("@tufjs/models"); +Object.defineProperty(exports, "TargetFile", { enumerable: true, get: function () { return models_1.TargetFile; } }); +var fetcher_1 = require("./fetcher"); +Object.defineProperty(exports, "BaseFetcher", { enumerable: true, get: function () { return fetcher_1.BaseFetcher; } }); +var updater_1 = require("./updater"); +Object.defineProperty(exports, "Updater", { enumerable: true, get: function () { return 
updater_1.Updater; } }); diff --git a/node_modules/sigstore/node_modules/tuf-js/dist/store.js b/node_modules/sigstore/node_modules/tuf-js/dist/store.js new file mode 100644 index 0000000000000..8567336108709 --- /dev/null +++ b/node_modules/sigstore/node_modules/tuf-js/dist/store.js @@ -0,0 +1,208 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TrustedMetadataStore = void 0; +const models_1 = require("@tufjs/models"); +const error_1 = require("./error"); +class TrustedMetadataStore { + constructor(rootData) { + this.trustedSet = {}; + // Client workflow 5.1: record fixed update start time + this.referenceTime = new Date(); + // Client workflow 5.2: load trusted root metadata + this.loadTrustedRoot(rootData); + } + get root() { + if (!this.trustedSet.root) { + throw new ReferenceError('No trusted root metadata'); + } + return this.trustedSet.root; + } + get timestamp() { + return this.trustedSet.timestamp; + } + get snapshot() { + return this.trustedSet.snapshot; + } + get targets() { + return this.trustedSet.targets; + } + getRole(name) { + return this.trustedSet[name]; + } + updateRoot(bytesBuffer) { + const data = JSON.parse(bytesBuffer.toString('utf8')); + const newRoot = models_1.Metadata.fromJSON(models_1.MetadataKind.Root, data); + if (newRoot.signed.type != models_1.MetadataKind.Root) { + throw new error_1.RepositoryError(`Expected 'root', got ${newRoot.signed.type}`); + } + // Client workflow 5.4: check for arbitrary software attack + this.root.verifyDelegate(models_1.MetadataKind.Root, newRoot); + // Client workflow 5.5: check for rollback attack + if (newRoot.signed.version != this.root.signed.version + 1) { + throw new error_1.BadVersionError(`Expected version ${this.root.signed.version + 1}, got ${newRoot.signed.version}`); + } + // Check that new root is signed by self + newRoot.verifyDelegate(models_1.MetadataKind.Root, newRoot); + // Client workflow 5.7: set new root as trusted root + this.trustedSet.root = newRoot; + return newRoot; + } + updateTimestamp(bytesBuffer) { + if (this.snapshot) { + throw new error_1.RuntimeError('Cannot update timestamp after snapshot'); + } + if (this.root.signed.isExpired(this.referenceTime)) { + throw new error_1.ExpiredMetadataError('Final root.json is expired'); + } + const data = JSON.parse(bytesBuffer.toString('utf8')); + const newTimestamp = models_1.Metadata.fromJSON(models_1.MetadataKind.Timestamp, data); + if (newTimestamp.signed.type != models_1.MetadataKind.Timestamp) { + throw new error_1.RepositoryError(`Expected 'timestamp', got ${newTimestamp.signed.type}`); + } + // Client workflow 5.4.2: check for arbitrary software attack + this.root.verifyDelegate(models_1.MetadataKind.Timestamp, newTimestamp); + if (this.timestamp) { + // Prevent rolling back timestamp version + // Client workflow 5.4.3.1: check for rollback attack + if (newTimestamp.signed.version < this.timestamp.signed.version) { + throw new error_1.BadVersionError(`New timestamp version ${newTimestamp.signed.version} is less than current version ${this.timestamp.signed.version}`); + } + // Keep using old timestamp if versions are equal. 
+ if (newTimestamp.signed.version === this.timestamp.signed.version) { + throw new error_1.EqualVersionError(`New timestamp version ${newTimestamp.signed.version} is equal to current version ${this.timestamp.signed.version}`); + } + // Prevent rolling back snapshot version + // Client workflow 5.4.3.2: check for rollback attack + const snapshotMeta = this.timestamp.signed.snapshotMeta; + const newSnapshotMeta = newTimestamp.signed.snapshotMeta; + if (newSnapshotMeta.version < snapshotMeta.version) { + throw new error_1.BadVersionError(`New snapshot version ${newSnapshotMeta.version} is less than current version ${snapshotMeta.version}`); + } + } + // expiry not checked to allow old timestamp to be used for rollback + // protection of new timestamp: expiry is checked in update_snapshot + this.trustedSet.timestamp = newTimestamp; + // Client workflow 5.4.4: check for freeze attack + this.checkFinalTimestamp(); + return newTimestamp; + } + updateSnapshot(bytesBuffer, trusted = false) { + if (!this.timestamp) { + throw new error_1.RuntimeError('Cannot update snapshot before timestamp'); + } + if (this.targets) { + throw new error_1.RuntimeError('Cannot update snapshot after targets'); + } + // Snapshot cannot be loaded if final timestamp is expired + this.checkFinalTimestamp(); + const snapshotMeta = this.timestamp.signed.snapshotMeta; + // Verify non-trusted data against the hashes in timestamp, if any. + // Trusted snapshot data has already been verified once. + // Client workflow 5.5.2: check against timestamp role's snapshot hash + if (!trusted) { + snapshotMeta.verify(bytesBuffer); + } + const data = JSON.parse(bytesBuffer.toString('utf8')); + const newSnapshot = models_1.Metadata.fromJSON(models_1.MetadataKind.Snapshot, data); + if (newSnapshot.signed.type != models_1.MetadataKind.Snapshot) { + throw new error_1.RepositoryError(`Expected 'snapshot', got ${newSnapshot.signed.type}`); + } + // Client workflow 5.5.3: check for arbitrary software attack + this.root.verifyDelegate(models_1.MetadataKind.Snapshot, newSnapshot); + // version check against meta version (5.5.4) is deferred to allow old + // snapshot to be used in rollback protection + // Client workflow 5.5.5: check for rollback attack + if (this.snapshot) { + Object.entries(this.snapshot.signed.meta).forEach(([fileName, fileInfo]) => { + const newFileInfo = newSnapshot.signed.meta[fileName]; + if (!newFileInfo) { + throw new error_1.RepositoryError(`Missing file ${fileName} in new snapshot`); + } + if (newFileInfo.version < fileInfo.version) { + throw new error_1.BadVersionError(`New version ${newFileInfo.version} of ${fileName} is less than current version ${fileInfo.version}`); + } + }); + } + this.trustedSet.snapshot = newSnapshot; + // snapshot is loaded, but we raise if it's not valid _final_ snapshot + // Client workflow 5.5.4 & 5.5.6 + this.checkFinalSnapsnot(); + return newSnapshot; + } + updateDelegatedTargets(bytesBuffer, roleName, delegatorName) { + if (!this.snapshot) { + throw new error_1.RuntimeError('Cannot update delegated targets before snapshot'); + } + // Targets cannot be loaded if final snapshot is expired or its version + // does not match meta version in timestamp.
+ this.checkFinalSnapsnot(); + const delegator = this.trustedSet[delegatorName]; + if (!delegator) { + throw new error_1.RuntimeError(`No trusted ${delegatorName} metadata`); + } + // Extract metadata for the delegated role from snapshot + const meta = this.snapshot.signed.meta?.[`${roleName}.json`]; + if (!meta) { + throw new error_1.RepositoryError(`Missing ${roleName}.json in snapshot`); + } + // Client workflow 5.6.2: check against snapshot role's targets hash + meta.verify(bytesBuffer); + const data = JSON.parse(bytesBuffer.toString('utf8')); + const newDelegate = models_1.Metadata.fromJSON(models_1.MetadataKind.Targets, data); + if (newDelegate.signed.type != models_1.MetadataKind.Targets) { + throw new error_1.RepositoryError(`Expected 'targets', got ${newDelegate.signed.type}`); + } + // Client workflow 5.6.3: check for arbitrary software attack + delegator.verifyDelegate(roleName, newDelegate); + // Client workflow 5.6.4: Check against snapshot role’s targets version + const version = newDelegate.signed.version; + if (version != meta.version) { + throw new error_1.BadVersionError(`Version ${version} of ${roleName} does not match snapshot version ${meta.version}`); + } + // Client workflow 5.6.5: check for a freeze attack + if (newDelegate.signed.isExpired(this.referenceTime)) { + throw new error_1.ExpiredMetadataError(`${roleName}.json is expired`); + } + this.trustedSet[roleName] = newDelegate; + } + // Verifies and loads data as trusted root metadata. + // Note that an expired initial root is still considered valid. + loadTrustedRoot(bytesBuffer) { + const data = JSON.parse(bytesBuffer.toString('utf8')); + const root = models_1.Metadata.fromJSON(models_1.MetadataKind.Root, data); + if (root.signed.type != models_1.MetadataKind.Root) { + throw new error_1.RepositoryError(`Expected 'root', got ${root.signed.type}`); + } + root.verifyDelegate(models_1.MetadataKind.Root, root); + this.trustedSet['root'] = root; + } + checkFinalTimestamp() { + // Timestamp MUST be loaded + if (!this.timestamp) { + throw new ReferenceError('No trusted timestamp metadata'); + } + // Client workflow 5.4.4: check for freeze attack + if (this.timestamp.signed.isExpired(this.referenceTime)) { + throw new error_1.ExpiredMetadataError('Final timestamp.json is expired'); + } + } + checkFinalSnapsnot() { + // Snapshot and timestamp MUST be loaded + if (!this.snapshot) { + throw new ReferenceError('No trusted snapshot metadata'); + } + if (!this.timestamp) { + throw new ReferenceError('No trusted timestamp metadata'); + } + // Client workflow 5.5.6: check for freeze attack + if (this.snapshot.signed.isExpired(this.referenceTime)) { + throw new error_1.ExpiredMetadataError('snapshot.json is expired'); + } + // Client workflow 5.5.4: check against timestamp role’s snapshot version + const snapshotMeta = this.timestamp.signed.snapshotMeta; + if (this.snapshot.signed.version !== snapshotMeta.version) { + throw new error_1.BadVersionError("Snapshot version doesn't match timestamp"); + } + } +} +exports.TrustedMetadataStore = TrustedMetadataStore; diff --git a/node_modules/sigstore/node_modules/tuf-js/dist/updater.js b/node_modules/sigstore/node_modules/tuf-js/dist/updater.js new file mode 100644 index 0000000000000..2aba48d24affd --- /dev/null +++ b/node_modules/sigstore/node_modules/tuf-js/dist/updater.js @@ -0,0 +1,320 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Updater = void 0; +const models_1 = require("@tufjs/models"); +const debug_1 = __importDefault(require("debug")); +const fs = __importStar(require("fs")); +const path = __importStar(require("path")); +const config_1 = require("./config"); +const error_1 = require("./error"); +const fetcher_1 = require("./fetcher"); +const store_1 = require("./store"); +const url = __importStar(require("./utils/url")); +const log = (0, debug_1.default)('tuf:cache'); +class Updater { + constructor(options) { + const { metadataDir, metadataBaseUrl, targetDir, targetBaseUrl, fetcher, config, } = options; + this.dir = metadataDir; + this.metadataBaseUrl = metadataBaseUrl; + this.targetDir = targetDir; + this.targetBaseUrl = targetBaseUrl; + const data = this.loadLocalMetadata(models_1.MetadataKind.Root); + this.trustedSet = new store_1.TrustedMetadataStore(data); + this.config = { ...config_1.defaultConfig, ...config }; + this.fetcher = + fetcher || + new fetcher_1.DefaultFetcher({ + timeout: this.config.fetchTimeout, + retries: this.config.fetchRetries, + }); + } + // refresh and load the metadata before downloading the target + // refresh should be called once after the client is initialized + async refresh() { + await this.loadRoot(); + await this.loadTimestamp(); + await this.loadSnapshot(); + await this.loadTargets(models_1.MetadataKind.Targets, models_1.MetadataKind.Root); + } + // Returns the TargetFile instance with information for the given target path. + // + // Implicitly calls refresh if it hasn't already been called. + async getTargetInfo(targetPath) { + if (!this.trustedSet.targets) { + await this.refresh(); + } + return this.preorderDepthFirstWalk(targetPath); + } + async downloadTarget(targetInfo, filePath, targetBaseUrl) { + const targetPath = filePath || this.generateTargetPath(targetInfo); + if (!targetBaseUrl) { + if (!this.targetBaseUrl) { + throw new error_1.ValueError('Target base URL not set'); + } + targetBaseUrl = this.targetBaseUrl; + } + let targetFilePath = targetInfo.path; + const consistentSnapshot = this.trustedSet.root.signed.consistentSnapshot; + if (consistentSnapshot && this.config.prefixTargetsWithHash) { + const hashes = Object.values(targetInfo.hashes); + const { dir, base } = path.parse(targetFilePath); + const filename = `${hashes[0]}.${base}`; + targetFilePath = dir ? 
`${dir}/${filename}` : filename; + } + const targetUrl = url.join(targetBaseUrl, targetFilePath); + // Client workflow 5.7.3: download target file + await this.fetcher.downloadFile(targetUrl, targetInfo.length, async (fileName) => { + // Verify hashes and length of downloaded file + await targetInfo.verify(fs.createReadStream(fileName)); + // Copy file to target path + log('WRITE %s', targetPath); + fs.copyFileSync(fileName, targetPath); + }); + return targetPath; + } + async findCachedTarget(targetInfo, filePath) { + if (!filePath) { + filePath = this.generateTargetPath(targetInfo); + } + try { + if (fs.existsSync(filePath)) { + await targetInfo.verify(fs.createReadStream(filePath)); + return filePath; + } + } + catch (error) { + return; // File not found + } + return; // File not found + } + loadLocalMetadata(fileName) { + const filePath = path.join(this.dir, `${fileName}.json`); + log('READ %s', filePath); + return fs.readFileSync(filePath); + } + // Sequentially load and persist on local disk every newer root metadata + // version available on the remote. + // Client workflow 5.3: update root role + async loadRoot() { + // Client workflow 5.3.2: version of trusted root metadata file + const rootVersion = this.trustedSet.root.signed.version; + const lowerBound = rootVersion + 1; + const upperBound = lowerBound + this.config.maxRootRotations; + for (let version = lowerBound; version <= upperBound; version++) { + const rootUrl = url.join(this.metadataBaseUrl, `${version}.root.json`); + try { + // Client workflow 5.3.3: download new root metadata file + const bytesData = await this.fetcher.downloadBytes(rootUrl, this.config.rootMaxLength); + // Client workflow 5.3.4 - 5.4.7 + this.trustedSet.updateRoot(bytesData); + // Client workflow 5.3.8: persist root metadata file + this.persistMetadata(models_1.MetadataKind.Root, bytesData); + } + catch (error) { + break; + } + } + } + // Load local and remote timestamp metadata. + // Client workflow 5.4: update timestamp role + async loadTimestamp() { + // Load local and remote timestamp metadata + try { + const data = this.loadLocalMetadata(models_1.MetadataKind.Timestamp); + this.trustedSet.updateTimestamp(data); + } + catch (error) { + // continue + } + // Load from remote (whether local load succeeded or not) + const timestampUrl = url.join(this.metadataBaseUrl, 'timestamp.json'); + // Client workflow 5.4.1: download timestamp metadata file + const bytesData = await this.fetcher.downloadBytes(timestampUrl, this.config.timestampMaxLength); + try { + // Client workflow 5.4.2 - 5.4.4 + this.trustedSet.updateTimestamp(bytesData); + } + catch (error) { + // If the new timestamp version is the same as the current one, discard the new one. + // This is normal and should NOT raise an error. + if (error instanceof error_1.EqualVersionError) { + return; + } + // Re-raise any other error + throw error; + } + // Client workflow 5.4.5: persist timestamp metadata + this.persistMetadata(models_1.MetadataKind.Timestamp, bytesData); + } + // Load local and remote snapshot metadata.
+ // Client workflow 5.5: update snapshot role + async loadSnapshot() { + // Load local (and if needed remote) snapshot metadata + try { + const data = this.loadLocalMetadata(models_1.MetadataKind.Snapshot); + this.trustedSet.updateSnapshot(data, true); + } + catch (error) { + if (!this.trustedSet.timestamp) { + throw new ReferenceError('No timestamp metadata'); + } + const snapshotMeta = this.trustedSet.timestamp.signed.snapshotMeta; + const maxLength = snapshotMeta.length || this.config.snapshotMaxLength; + const version = this.trustedSet.root.signed.consistentSnapshot + ? snapshotMeta.version + : undefined; + const snapshotUrl = url.join(this.metadataBaseUrl, version ? `${version}.snapshot.json` : 'snapshot.json'); + try { + // Client workflow 5.5.1: download snapshot metadata file + const bytesData = await this.fetcher.downloadBytes(snapshotUrl, maxLength); + // Client workflow 5.5.2 - 5.5.6 + this.trustedSet.updateSnapshot(bytesData); + // Client workflow 5.5.7: persist snapshot metadata file + this.persistMetadata(models_1.MetadataKind.Snapshot, bytesData); + } + catch (error) { + throw new error_1.RuntimeError(`Unable to load snapshot metadata error ${error}`); + } + } + } + // Load local and remote targets metadata. + // Client workflow 5.6: update targets role + async loadTargets(role, parentRole) { + if (this.trustedSet.getRole(role)) { + return this.trustedSet.getRole(role); + } + try { + const buffer = this.loadLocalMetadata(role); + this.trustedSet.updateDelegatedTargets(buffer, role, parentRole); + } + catch (error) { + // Local 'role' does not exist or is invalid: update from remote + if (!this.trustedSet.snapshot) { + throw new ReferenceError('No snapshot metadata'); + } + const metaInfo = this.trustedSet.snapshot.signed.meta[`${role}.json`]; + // TODO: use length for fetching + const maxLength = metaInfo.length || this.config.targetsMaxLength; + const version = this.trustedSet.root.signed.consistentSnapshot + ? metaInfo.version + : undefined; + const metadataUrl = url.join(this.metadataBaseUrl, version ? `${version}.${role}.json` : `${role}.json`); + try { + // Client workflow 5.6.1: download targets metadata file + const bytesData = await this.fetcher.downloadBytes(metadataUrl, maxLength); + // Client workflow 5.6.2 - 5.6.6 + this.trustedSet.updateDelegatedTargets(bytesData, role, parentRole); + // Client workflow 5.6.7: persist targets metadata file + this.persistMetadata(role, bytesData); + } + catch (error) { + throw new error_1.RuntimeError(`Unable to load targets error ${error}`); + } + } + return this.trustedSet.getRole(role); + } + async preorderDepthFirstWalk(targetPath) { + // Interrogates the tree of target delegations in order of appearance + // (which implicitly orders trustworthiness), and returns the matching + // target found in the most trusted role. + // List of delegations to be interrogated. A (role, parent role) pair + // is needed to load and verify the delegated targets metadata. + const delegationsToVisit = [ + { + roleName: models_1.MetadataKind.Targets, + parentRoleName: models_1.MetadataKind.Root, + }, + ]; + const visitedRoleNames = new Set(); + // Client workflow 5.6.7: preorder depth-first traversal of the graph of + // target delegations + while (visitedRoleNames.size <= this.config.maxDelegations && + delegationsToVisit.length > 0) { + // Pop the role name from the top of the stack.
+ // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + const { roleName, parentRoleName } = delegationsToVisit.pop(); + // Skip any visited current role to prevent cycles. + // Client workflow 5.6.7.1: skip already-visited roles + if (visitedRoleNames.has(roleName)) { + continue; + } + // The metadata for 'role_name' must be downloaded/updated before + // its targets, delegations, and child roles can be inspected. + const targets = (await this.loadTargets(roleName, parentRoleName)) + ?.signed; + if (!targets) { + continue; + } + const target = targets.targets?.[targetPath]; + if (target) { + return target; + } + // After preorder check, add current role to set of visited roles. + visitedRoleNames.add(roleName); + if (targets.delegations) { + const childRolesToVisit = []; + // NOTE: This may be a slow operation if there are many delegated roles. + const rolesForTarget = targets.delegations.rolesForTarget(targetPath); + for (const { role: childName, terminating } of rolesForTarget) { + childRolesToVisit.push({ + roleName: childName, + parentRoleName: roleName, + }); + // Client workflow 5.6.7.2.1 + if (terminating) { + delegationsToVisit.splice(0); // empty the array + break; + } + } + childRolesToVisit.reverse(); + delegationsToVisit.push(...childRolesToVisit); + } + } + return; // no matching target found + } + generateTargetPath(targetInfo) { + if (!this.targetDir) { + throw new error_1.ValueError('Target directory not set'); + } + // URL encode target path + const filePath = encodeURIComponent(targetInfo.path); + return path.join(this.targetDir, filePath); + } + async persistMetadata(metaDataName, bytesData) { + try { + const filePath = path.join(this.dir, `${metaDataName}.json`); + log('WRITE %s', filePath); + fs.writeFileSync(filePath, bytesData.toString('utf8')); + } + catch (error) { + throw new error_1.PersistError(`Failed to persist metadata ${metaDataName} error: ${error}`); + } + } +} +exports.Updater = Updater; diff --git a/node_modules/sigstore/node_modules/tuf-js/dist/utils/tmpfile.js b/node_modules/sigstore/node_modules/tuf-js/dist/utils/tmpfile.js new file mode 100644 index 0000000000000..923eef6044bcc --- /dev/null +++ b/node_modules/sigstore/node_modules/tuf-js/dist/utils/tmpfile.js @@ -0,0 +1,25 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.withTempFile = void 0; +const promises_1 = __importDefault(require("fs/promises")); +const os_1 = __importDefault(require("os")); +const path_1 = __importDefault(require("path")); +// Invokes the given handler with the path to a temporary file. The file +// is deleted after the handler returns. +const withTempFile = async (handler) => withTempDir(async (dir) => handler(path_1.default.join(dir, 'tempfile'))); +exports.withTempFile = withTempFile; +// Invokes the given handler with a temporary directory. The directory is +// deleted after the handler returns. 
+const withTempDir = async (handler) => { + const tmpDir = await promises_1.default.realpath(os_1.default.tmpdir()); + const dir = await promises_1.default.mkdtemp(tmpDir + path_1.default.sep); + try { + return await handler(dir); + } + finally { + await promises_1.default.rm(dir, { force: true, recursive: true, maxRetries: 3 }); + } +}; diff --git a/node_modules/sigstore/node_modules/tuf-js/dist/utils/url.js b/node_modules/sigstore/node_modules/tuf-js/dist/utils/url.js new file mode 100644 index 0000000000000..ce67fe2c23053 --- /dev/null +++ b/node_modules/sigstore/node_modules/tuf-js/dist/utils/url.js @@ -0,0 +1,14 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.join = void 0; +const url_1 = require("url"); +function join(base, path) { + return new url_1.URL(ensureTrailingSlash(base) + removeLeadingSlash(path)).toString(); +} +exports.join = join; +function ensureTrailingSlash(path) { + return path.endsWith('/') ? path : path + '/'; +} +function removeLeadingSlash(path) { + return path.startsWith('/') ? path.slice(1) : path; +} diff --git a/node_modules/sigstore/node_modules/tuf-js/package.json b/node_modules/sigstore/node_modules/tuf-js/package.json new file mode 100644 index 0000000000000..9187d88083272 --- /dev/null +++ b/node_modules/sigstore/node_modules/tuf-js/package.json @@ -0,0 +1,46 @@ +{ + "name": "tuf-js", + "version": "1.1.7", + "description": "JavaScript implementation of The Update Framework (TUF)", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "scripts": { + "build": "tsc --build", + "clean": "rm -rf dist", + "test": "jest" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/theupdateframework/tuf-js.git" + }, + "files": [ + "dist" + ], + "keywords": [ + "tuf", + "security", + "update" + ], + "author": "bdehamer@github.com", + "license": "MIT", + "bugs": { + "url": "https://github.com/theupdateframework/tuf-js/issues" + }, + "homepage": "https://github.com/theupdateframework/tuf-js/tree/main/packages/client#readme", + "devDependencies": { + "@tufjs/repo-mock": "1.3.1", + "@types/debug": "^4.1.8", + "@types/make-fetch-happen": "^10.0.1", + "@types/node": "^20.2.5", + "nock": "^13.3.1", + "typescript": "^5.1.3" + }, + "dependencies": { + "@tufjs/models": "1.0.4", + "debug": "^4.3.4", + "make-fetch-happen": "^11.1.1" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } +} diff --git a/node_modules/tuf-js/dist/config.js b/node_modules/tuf-js/dist/config.js index c2d970e256244..bafb33a8a1bf7 100644 --- a/node_modules/tuf-js/dist/config.js +++ b/node_modules/tuf-js/dist/config.js @@ -10,5 +10,6 @@ exports.defaultConfig = { targetsMaxLength: 5000000, prefixTargetsWithHash: true, fetchTimeout: 100000, - fetchRetries: 2, + fetchRetries: undefined, + fetchRetry: 2, }; diff --git a/node_modules/tuf-js/dist/fetcher.js b/node_modules/tuf-js/dist/fetcher.js index d3dcf53eeb869..f966ce1bb0cdc 100644 --- a/node_modules/tuf-js/dist/fetcher.js +++ b/node_modules/tuf-js/dist/fetcher.js @@ -57,13 +57,13 @@ class DefaultFetcher extends BaseFetcher { constructor(options = {}) { super(); this.timeout = options.timeout; - this.retries = options.retries; + this.retry = options.retry; } async fetch(url) { log('GET %s', url); const response = await (0, make_fetch_happen_1.default)(url, { timeout: this.timeout, - retry: this.retries, + retry: this.retry, }); if (!response.ok || !response?.body) { throw new error_1.DownloadHTTPError('Failed to download', response.status); diff --git 
a/node_modules/tuf-js/dist/updater.js b/node_modules/tuf-js/dist/updater.js index 2aba48d24affd..2d0c769c7af64 100644 --- a/node_modules/tuf-js/dist/updater.js +++ b/node_modules/tuf-js/dist/updater.js @@ -51,7 +51,7 @@ class Updater { fetcher || new fetcher_1.DefaultFetcher({ timeout: this.config.fetchTimeout, - retries: this.config.fetchRetries, + retry: this.config.fetchRetries ?? this.config.fetchRetry, }); } // refresh and load the metadata before downloading the target @@ -306,7 +306,7 @@ class Updater { const filePath = encodeURIComponent(targetInfo.path); return path.join(this.targetDir, filePath); } - async persistMetadata(metaDataName, bytesData) { + persistMetadata(metaDataName, bytesData) { try { const filePath = path.join(this.dir, `${metaDataName}.json`); log('WRITE %s', filePath); diff --git a/node_modules/tuf-js/node_modules/cacache/LICENSE.md b/node_modules/tuf-js/node_modules/cacache/LICENSE.md deleted file mode 100644 index 8d28acf866d93..0000000000000 --- a/node_modules/tuf-js/node_modules/cacache/LICENSE.md +++ /dev/null @@ -1,16 +0,0 @@ -ISC License - -Copyright (c) npm, Inc. - -Permission to use, copy, modify, and/or distribute this software for -any purpose with or without fee is hereby granted, provided that the -above copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS -ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE -COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR -CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS -OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE -USE OR PERFORMANCE OF THIS SOFTWARE. 
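For context on the tuf-js patches above: the fetcher option is renamed from retries to retry (matching the option name make-fetch-happen expects), and dist/config.js keeps fetchRetries as a deprecated alias alongside the new fetchRetry default. A small sketch of how the fallback in the patched Updater constructor resolves; the resolveRetry helper is illustrative only.

// Defaults from the patched dist/config.js.
const defaultConfig = { fetchRetries: undefined, fetchRetry: 2 }

// Mirrors `this.config.fetchRetries ?? this.config.fetchRetry` in updater.js:
// an explicitly set legacy fetchRetries wins, otherwise fetchRetry applies.
const resolveRetry = (config) => config.fetchRetries ?? config.fetchRetry

console.log(resolveRetry(defaultConfig)) // 2
console.log(resolveRetry({ ...defaultConfig, fetchRetries: 5 })) // 5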
diff --git a/node_modules/tuf-js/node_modules/cacache/lib/content/path.js b/node_modules/tuf-js/node_modules/cacache/lib/content/path.js deleted file mode 100644 index ad5a76a4f73f2..0000000000000 --- a/node_modules/tuf-js/node_modules/cacache/lib/content/path.js +++ /dev/null @@ -1,29 +0,0 @@ -'use strict' - -const contentVer = require('../../package.json')['cache-version'].content -const hashToSegments = require('../util/hash-to-segments') -const path = require('path') -const ssri = require('ssri') - -// Current format of content file path: -// -// sha512-BaSE64Hex= -> -// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee -// -module.exports = contentPath - -function contentPath (cache, integrity) { - const sri = ssri.parse(integrity, { single: true }) - // contentPath is the *strongest* algo given - return path.join( - contentDir(cache), - sri.algorithm, - ...hashToSegments(sri.hexDigest()) - ) -} - -module.exports.contentDir = contentDir - -function contentDir (cache) { - return path.join(cache, `content-v${contentVer}`) -} diff --git a/node_modules/tuf-js/node_modules/cacache/lib/content/read.js b/node_modules/tuf-js/node_modules/cacache/lib/content/read.js deleted file mode 100644 index f41b539df65dc..0000000000000 --- a/node_modules/tuf-js/node_modules/cacache/lib/content/read.js +++ /dev/null @@ -1,166 +0,0 @@ -'use strict' - -const fs = require('fs/promises') -const fsm = require('fs-minipass') -const ssri = require('ssri') -const contentPath = require('./path') -const Pipeline = require('minipass-pipeline') - -module.exports = read - -const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024 -async function read (cache, integrity, opts = {}) { - const { size } = opts - const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => { - // get size - const stat = await fs.stat(cpath) - return { stat, cpath, sri } - }) - if (typeof size === 'number' && stat.size !== size) { - throw sizeError(size, stat.size) - } - - if (stat.size > MAX_SINGLE_READ_SIZE) { - return readPipeline(cpath, stat.size, sri, new Pipeline()).concat() - } - - const data = await fs.readFile(cpath, { encoding: null }) - if (!ssri.checkData(data, sri)) { - throw integrityError(sri, cpath) - } - - return data -} - -const readPipeline = (cpath, size, sri, stream) => { - stream.push( - new fsm.ReadStream(cpath, { - size, - readSize: MAX_SINGLE_READ_SIZE, - }), - ssri.integrityStream({ - integrity: sri, - size, - }) - ) - return stream -} - -module.exports.stream = readStream -module.exports.readStream = readStream - -function readStream (cache, integrity, opts = {}) { - const { size } = opts - const stream = new Pipeline() - // Set all this up to run on the stream and then just return the stream - Promise.resolve().then(async () => { - const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => { - // just stat to ensure it exists - const stat = await fs.stat(cpath) - return { stat, cpath, sri } - }) - if (typeof size === 'number' && size !== stat.size) { - return stream.emit('error', sizeError(size, stat.size)) - } - - return readPipeline(cpath, stat.size, sri, stream) - }).catch(err => stream.emit('error', err)) - - return stream -} - -module.exports.copy = copy - -function copy (cache, integrity, dest) { - return withContentSri(cache, integrity, (cpath, sri) => { - return fs.copyFile(cpath, dest) - }) -} - -module.exports.hasContent = hasContent - -async function hasContent (cache, integrity) { - if (!integrity) { - return false - } - - try { - return await 
withContentSri(cache, integrity, async (cpath, sri) => { - const stat = await fs.stat(cpath) - return { size: stat.size, sri, stat } - }) - } catch (err) { - if (err.code === 'ENOENT') { - return false - } - - if (err.code === 'EPERM') { - /* istanbul ignore else */ - if (process.platform !== 'win32') { - throw err - } else { - return false - } - } - } -} - -async function withContentSri (cache, integrity, fn) { - const sri = ssri.parse(integrity) - // If `integrity` has multiple entries, pick the first digest - // with available local data. - const algo = sri.pickAlgorithm() - const digests = sri[algo] - - if (digests.length <= 1) { - const cpath = contentPath(cache, digests[0]) - return fn(cpath, digests[0]) - } else { - // Can't use race here because a generic error can happen before - // a ENOENT error, and can happen before a valid result - const results = await Promise.all(digests.map(async (meta) => { - try { - return await withContentSri(cache, meta, fn) - } catch (err) { - if (err.code === 'ENOENT') { - return Object.assign( - new Error('No matching content found for ' + sri.toString()), - { code: 'ENOENT' } - ) - } - return err - } - })) - // Return the first non error if it is found - const result = results.find((r) => !(r instanceof Error)) - if (result) { - return result - } - - // Throw the No matching content found error - const enoentError = results.find((r) => r.code === 'ENOENT') - if (enoentError) { - throw enoentError - } - - // Throw generic error - throw results.find((r) => r instanceof Error) - } -} - -function sizeError (expected, found) { - /* eslint-disable-next-line max-len */ - const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) - err.expected = expected - err.found = found - err.code = 'EBADSIZE' - return err -} - -function integrityError (sri, path) { - const err = new Error(`Integrity verification failed for ${sri} (${path})`) - err.code = 'EINTEGRITY' - err.sri = sri - err.path = path - return err -} diff --git a/node_modules/tuf-js/node_modules/cacache/lib/content/rm.js b/node_modules/tuf-js/node_modules/cacache/lib/content/rm.js deleted file mode 100644 index ce58d679e4cb2..0000000000000 --- a/node_modules/tuf-js/node_modules/cacache/lib/content/rm.js +++ /dev/null @@ -1,18 +0,0 @@ -'use strict' - -const fs = require('fs/promises') -const contentPath = require('./path') -const { hasContent } = require('./read') - -module.exports = rm - -async function rm (cache, integrity) { - const content = await hasContent(cache, integrity) - // ~pretty~ sure we can't end up with a content lacking sri, but be safe - if (content && content.sri) { - await fs.rm(contentPath(cache, content.sri), { recursive: true, force: true }) - return true - } else { - return false - } -} diff --git a/node_modules/tuf-js/node_modules/cacache/lib/content/write.js b/node_modules/tuf-js/node_modules/cacache/lib/content/write.js deleted file mode 100644 index 7146146581287..0000000000000 --- a/node_modules/tuf-js/node_modules/cacache/lib/content/write.js +++ /dev/null @@ -1,205 +0,0 @@ -'use strict' - -const events = require('events') - -const contentPath = require('./path') -const fs = require('fs/promises') -const { moveFile } = require('@npmcli/fs') -const { Minipass } = require('minipass') -const Pipeline = require('minipass-pipeline') -const Flush = require('minipass-flush') -const path = require('path') -const ssri = require('ssri') -const uniqueFilename = require('unique-filename') -const fsm = require('fs-minipass') - 
-module.exports = write - -// Cache of move operations in process so we don't duplicate -const moveOperations = new Map() - -async function write (cache, data, opts = {}) { - const { algorithms, size, integrity } = opts - - if (typeof size === 'number' && data.length !== size) { - throw sizeError(size, data.length) - } - - const sri = ssri.fromData(data, algorithms ? { algorithms } : {}) - if (integrity && !ssri.checkData(data, integrity, opts)) { - throw checksumError(integrity, sri) - } - - for (const algo in sri) { - const tmp = await makeTmp(cache, opts) - const hash = sri[algo].toString() - try { - await fs.writeFile(tmp.target, data, { flag: 'wx' }) - await moveToDestination(tmp, cache, hash, opts) - } finally { - if (!tmp.moved) { - await fs.rm(tmp.target, { recursive: true, force: true }) - } - } - } - return { integrity: sri, size: data.length } -} - -module.exports.stream = writeStream - -// writes proxied to the 'inputStream' that is passed to the Promise -// 'end' is deferred until content is handled. -class CacacheWriteStream extends Flush { - constructor (cache, opts) { - super() - this.opts = opts - this.cache = cache - this.inputStream = new Minipass() - this.inputStream.on('error', er => this.emit('error', er)) - this.inputStream.on('drain', () => this.emit('drain')) - this.handleContentP = null - } - - write (chunk, encoding, cb) { - if (!this.handleContentP) { - this.handleContentP = handleContent( - this.inputStream, - this.cache, - this.opts - ) - } - return this.inputStream.write(chunk, encoding, cb) - } - - flush (cb) { - this.inputStream.end(() => { - if (!this.handleContentP) { - const e = new Error('Cache input stream was empty') - e.code = 'ENODATA' - // empty streams are probably emitting end right away. - // defer this one tick by rejecting a promise on it. 
- return Promise.reject(e).catch(cb) - } - // eslint-disable-next-line promise/catch-or-return - this.handleContentP.then( - (res) => { - res.integrity && this.emit('integrity', res.integrity) - // eslint-disable-next-line promise/always-return - res.size !== null && this.emit('size', res.size) - cb() - }, - (er) => cb(er) - ) - }) - } -} - -function writeStream (cache, opts = {}) { - return new CacacheWriteStream(cache, opts) -} - -async function handleContent (inputStream, cache, opts) { - const tmp = await makeTmp(cache, opts) - try { - const res = await pipeToTmp(inputStream, cache, tmp.target, opts) - await moveToDestination( - tmp, - cache, - res.integrity, - opts - ) - return res - } finally { - if (!tmp.moved) { - await fs.rm(tmp.target, { recursive: true, force: true }) - } - } -} - -async function pipeToTmp (inputStream, cache, tmpTarget, opts) { - const outStream = new fsm.WriteStream(tmpTarget, { - flags: 'wx', - }) - - if (opts.integrityEmitter) { - // we need to create these all simultaneously since they can fire in any order - const [integrity, size] = await Promise.all([ - events.once(opts.integrityEmitter, 'integrity').then(res => res[0]), - events.once(opts.integrityEmitter, 'size').then(res => res[0]), - new Pipeline(inputStream, outStream).promise(), - ]) - return { integrity, size } - } - - let integrity - let size - const hashStream = ssri.integrityStream({ - integrity: opts.integrity, - algorithms: opts.algorithms, - size: opts.size, - }) - hashStream.on('integrity', i => { - integrity = i - }) - hashStream.on('size', s => { - size = s - }) - - const pipeline = new Pipeline(inputStream, hashStream, outStream) - await pipeline.promise() - return { integrity, size } -} - -async function makeTmp (cache, opts) { - const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) - await fs.mkdir(path.dirname(tmpTarget), { recursive: true }) - return { - target: tmpTarget, - moved: false, - } -} - -async function moveToDestination (tmp, cache, sri, opts) { - const destination = contentPath(cache, sri) - const destDir = path.dirname(destination) - if (moveOperations.has(destination)) { - return moveOperations.get(destination) - } - moveOperations.set( - destination, - fs.mkdir(destDir, { recursive: true }) - .then(async () => { - await moveFile(tmp.target, destination, { overwrite: false }) - tmp.moved = true - return tmp.moved - }) - .catch(err => { - if (!err.message.startsWith('The destination file exists')) { - throw Object.assign(err, { code: 'EEXIST' }) - } - }).finally(() => { - moveOperations.delete(destination) - }) - - ) - return moveOperations.get(destination) -} - -function sizeError (expected, found) { - /* eslint-disable-next-line max-len */ - const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) - err.expected = expected - err.found = found - err.code = 'EBADSIZE' - return err -} - -function checksumError (expected, found) { - const err = new Error(`Integrity check failed: - Wanted: ${expected} - Found: ${found}`) - err.code = 'EINTEGRITY' - err.expected = expected - err.found = found - return err -} diff --git a/node_modules/tuf-js/node_modules/cacache/lib/entry-index.js b/node_modules/tuf-js/node_modules/cacache/lib/entry-index.js deleted file mode 100644 index 722a37af5ce15..0000000000000 --- a/node_modules/tuf-js/node_modules/cacache/lib/entry-index.js +++ /dev/null @@ -1,330 +0,0 @@ -'use strict' - -const crypto = require('crypto') -const { - appendFile, - mkdir, - readFile, - readdir, - 
rm, - writeFile, -} = require('fs/promises') -const { Minipass } = require('minipass') -const path = require('path') -const ssri = require('ssri') -const uniqueFilename = require('unique-filename') - -const contentPath = require('./content/path') -const hashToSegments = require('./util/hash-to-segments') -const indexV = require('../package.json')['cache-version'].index -const { moveFile } = require('@npmcli/fs') - -module.exports.NotFoundError = class NotFoundError extends Error { - constructor (cache, key) { - super(`No cache entry for ${key} found in ${cache}`) - this.code = 'ENOENT' - this.cache = cache - this.key = key - } -} - -module.exports.compact = compact - -async function compact (cache, key, matchFn, opts = {}) { - const bucket = bucketPath(cache, key) - const entries = await bucketEntries(bucket) - const newEntries = [] - // we loop backwards because the bottom-most result is the newest - // since we add new entries with appendFile - for (let i = entries.length - 1; i >= 0; --i) { - const entry = entries[i] - // a null integrity could mean either a delete was appended - // or the user has simply stored an index that does not map - // to any content. we determine if the user wants to keep the - // null integrity based on the validateEntry function passed in options. - // if the integrity is null and no validateEntry is provided, we break - // as we consider the null integrity to be a deletion of everything - // that came before it. - if (entry.integrity === null && !opts.validateEntry) { - break - } - - // if this entry is valid, and it is either the first entry or - // the newEntries array doesn't already include an entry that - // matches this one based on the provided matchFn, then we add - // it to the beginning of our list - if ((!opts.validateEntry || opts.validateEntry(entry) === true) && - (newEntries.length === 0 || - !newEntries.find((oldEntry) => matchFn(oldEntry, entry)))) { - newEntries.unshift(entry) - } - } - - const newIndex = '\n' + newEntries.map((entry) => { - const stringified = JSON.stringify(entry) - const hash = hashEntry(stringified) - return `${hash}\t${stringified}` - }).join('\n') - - const setup = async () => { - const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) - await mkdir(path.dirname(target), { recursive: true }) - return { - target, - moved: false, - } - } - - const teardown = async (tmp) => { - if (!tmp.moved) { - return rm(tmp.target, { recursive: true, force: true }) - } - } - - const write = async (tmp) => { - await writeFile(tmp.target, newIndex, { flag: 'wx' }) - await mkdir(path.dirname(bucket), { recursive: true }) - // we use @npmcli/move-file directly here because we - // want to overwrite the existing file - await moveFile(tmp.target, bucket) - tmp.moved = true - } - - // write the file atomically - const tmp = await setup() - try { - await write(tmp) - } finally { - await teardown(tmp) - } - - // we reverse the list we generated such that the newest - // entries come first in order to make looping through them easier - // the true passed to formatEntry tells it to keep null - // integrity values, if they made it this far it's because - // validateEntry returned true, and as such we should return it - return newEntries.reverse().map((entry) => formatEntry(cache, entry, true)) -} - -module.exports.insert = insert - -async function insert (cache, key, integrity, opts = {}) { - const { metadata, size, time } = opts - const bucket = bucketPath(cache, key) - const entry = { - key, - integrity: integrity && 
ssri.stringify(integrity), - time: time || Date.now(), - size, - metadata, - } - try { - await mkdir(path.dirname(bucket), { recursive: true }) - const stringified = JSON.stringify(entry) - // NOTE - Cleverness ahoy! - // - // This works because it's tremendously unlikely for an entry to corrupt - // another while still preserving the string length of the JSON in - // question. So, we just slap the length in there and verify it on read. - // - // Thanks to @isaacs for the whiteboarding session that ended up with - // this. - await appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`) - } catch (err) { - if (err.code === 'ENOENT') { - return undefined - } - - throw err - } - return formatEntry(cache, entry) -} - -module.exports.find = find - -async function find (cache, key) { - const bucket = bucketPath(cache, key) - try { - const entries = await bucketEntries(bucket) - return entries.reduce((latest, next) => { - if (next && next.key === key) { - return formatEntry(cache, next) - } else { - return latest - } - }, null) - } catch (err) { - if (err.code === 'ENOENT') { - return null - } else { - throw err - } - } -} - -module.exports.delete = del - -function del (cache, key, opts = {}) { - if (!opts.removeFully) { - return insert(cache, key, null, opts) - } - - const bucket = bucketPath(cache, key) - return rm(bucket, { recursive: true, force: true }) -} - -module.exports.lsStream = lsStream - -function lsStream (cache) { - const indexDir = bucketDir(cache) - const stream = new Minipass({ objectMode: true }) - - // Set all this up to run on the stream and then just return the stream - Promise.resolve().then(async () => { - const buckets = await readdirOrEmpty(indexDir) - await Promise.all(buckets.map(async (bucket) => { - const bucketPath = path.join(indexDir, bucket) - const subbuckets = await readdirOrEmpty(bucketPath) - await Promise.all(subbuckets.map(async (subbucket) => { - const subbucketPath = path.join(bucketPath, subbucket) - - // "/cachename//./*" - const subbucketEntries = await readdirOrEmpty(subbucketPath) - await Promise.all(subbucketEntries.map(async (entry) => { - const entryPath = path.join(subbucketPath, entry) - try { - const entries = await bucketEntries(entryPath) - // using a Map here prevents duplicate keys from showing up - // twice, I guess? - const reduced = entries.reduce((acc, entry) => { - acc.set(entry.key, entry) - return acc - }, new Map()) - // reduced is a map of key => entry - for (const entry of reduced.values()) { - const formatted = formatEntry(cache, entry) - if (formatted) { - stream.write(formatted) - } - } - } catch (err) { - if (err.code === 'ENOENT') { - return undefined - } - throw err - } - })) - })) - })) - stream.end() - return stream - }).catch(err => stream.emit('error', err)) - - return stream -} - -module.exports.ls = ls - -async function ls (cache) { - const entries = await lsStream(cache).collect() - return entries.reduce((acc, xs) => { - acc[xs.key] = xs - return acc - }, {}) -} - -module.exports.bucketEntries = bucketEntries - -async function bucketEntries (bucket, filter) { - const data = await readFile(bucket, 'utf8') - return _bucketEntries(data, filter) -} - -function _bucketEntries (data, filter) { - const entries = [] - data.split('\n').forEach((entry) => { - if (!entry) { - return - } - - const pieces = entry.split('\t') - if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) { - // Hash is no good! Corruption or malice? Doesn't matter! 
- // EJECT EJECT - return - } - let obj - try { - obj = JSON.parse(pieces[1]) - } catch (_) { - // eslint-ignore-next-line no-empty-block - } - // coverage disabled here, no need to test with an entry that parses to something falsey - // istanbul ignore else - if (obj) { - entries.push(obj) - } - }) - return entries -} - -module.exports.bucketDir = bucketDir - -function bucketDir (cache) { - return path.join(cache, `index-v${indexV}`) -} - -module.exports.bucketPath = bucketPath - -function bucketPath (cache, key) { - const hashed = hashKey(key) - return path.join.apply( - path, - [bucketDir(cache)].concat(hashToSegments(hashed)) - ) -} - -module.exports.hashKey = hashKey - -function hashKey (key) { - return hash(key, 'sha256') -} - -module.exports.hashEntry = hashEntry - -function hashEntry (str) { - return hash(str, 'sha1') -} - -function hash (str, digest) { - return crypto - .createHash(digest) - .update(str) - .digest('hex') -} - -function formatEntry (cache, entry, keepAll) { - // Treat null digests as deletions. They'll shadow any previous entries. - if (!entry.integrity && !keepAll) { - return null - } - - return { - key: entry.key, - integrity: entry.integrity, - path: entry.integrity ? contentPath(cache, entry.integrity) : undefined, - size: entry.size, - time: entry.time, - metadata: entry.metadata, - } -} - -function readdirOrEmpty (dir) { - return readdir(dir).catch((err) => { - if (err.code === 'ENOENT' || err.code === 'ENOTDIR') { - return [] - } - - throw err - }) -} diff --git a/node_modules/tuf-js/node_modules/cacache/lib/get.js b/node_modules/tuf-js/node_modules/cacache/lib/get.js deleted file mode 100644 index 80ec206c7ecaa..0000000000000 --- a/node_modules/tuf-js/node_modules/cacache/lib/get.js +++ /dev/null @@ -1,170 +0,0 @@ -'use strict' - -const Collect = require('minipass-collect') -const { Minipass } = require('minipass') -const Pipeline = require('minipass-pipeline') - -const index = require('./entry-index') -const memo = require('./memoization') -const read = require('./content/read') - -async function getData (cache, key, opts = {}) { - const { integrity, memoize, size } = opts - const memoized = memo.get(cache, key, opts) - if (memoized && memoize !== false) { - return { - metadata: memoized.entry.metadata, - data: memoized.data, - integrity: memoized.entry.integrity, - size: memoized.entry.size, - } - } - - const entry = await index.find(cache, key, opts) - if (!entry) { - throw new index.NotFoundError(cache, key) - } - const data = await read(cache, entry.integrity, { integrity, size }) - if (memoize) { - memo.put(cache, entry, data, opts) - } - - return { - data, - metadata: entry.metadata, - size: entry.size, - integrity: entry.integrity, - } -} -module.exports = getData - -async function getDataByDigest (cache, key, opts = {}) { - const { integrity, memoize, size } = opts - const memoized = memo.get.byDigest(cache, key, opts) - if (memoized && memoize !== false) { - return memoized - } - - const res = await read(cache, key, { integrity, size }) - if (memoize) { - memo.put.byDigest(cache, key, res, opts) - } - return res -} -module.exports.byDigest = getDataByDigest - -const getMemoizedStream = (memoized) => { - const stream = new Minipass() - stream.on('newListener', function (ev, cb) { - ev === 'metadata' && cb(memoized.entry.metadata) - ev === 'integrity' && cb(memoized.entry.integrity) - ev === 'size' && cb(memoized.entry.size) - }) - stream.end(memoized.data) - return stream -} - -function getStream (cache, key, opts = {}) { - const { memoize, size } 
= opts - const memoized = memo.get(cache, key, opts) - if (memoized && memoize !== false) { - return getMemoizedStream(memoized) - } - - const stream = new Pipeline() - // Set all this up to run on the stream and then just return the stream - Promise.resolve().then(async () => { - const entry = await index.find(cache, key) - if (!entry) { - throw new index.NotFoundError(cache, key) - } - - stream.emit('metadata', entry.metadata) - stream.emit('integrity', entry.integrity) - stream.emit('size', entry.size) - stream.on('newListener', function (ev, cb) { - ev === 'metadata' && cb(entry.metadata) - ev === 'integrity' && cb(entry.integrity) - ev === 'size' && cb(entry.size) - }) - - const src = read.readStream( - cache, - entry.integrity, - { ...opts, size: typeof size !== 'number' ? entry.size : size } - ) - - if (memoize) { - const memoStream = new Collect.PassThrough() - memoStream.on('collect', data => memo.put(cache, entry, data, opts)) - stream.unshift(memoStream) - } - stream.unshift(src) - return stream - }).catch((err) => stream.emit('error', err)) - - return stream -} - -module.exports.stream = getStream - -function getStreamDigest (cache, integrity, opts = {}) { - const { memoize } = opts - const memoized = memo.get.byDigest(cache, integrity, opts) - if (memoized && memoize !== false) { - const stream = new Minipass() - stream.end(memoized) - return stream - } else { - const stream = read.readStream(cache, integrity, opts) - if (!memoize) { - return stream - } - - const memoStream = new Collect.PassThrough() - memoStream.on('collect', data => memo.put.byDigest( - cache, - integrity, - data, - opts - )) - return new Pipeline(stream, memoStream) - } -} - -module.exports.stream.byDigest = getStreamDigest - -function info (cache, key, opts = {}) { - const { memoize } = opts - const memoized = memo.get(cache, key, opts) - if (memoized && memoize !== false) { - return Promise.resolve(memoized.entry) - } else { - return index.find(cache, key) - } -} -module.exports.info = info - -async function copy (cache, key, dest, opts = {}) { - const entry = await index.find(cache, key, opts) - if (!entry) { - throw new index.NotFoundError(cache, key) - } - await read.copy(cache, entry.integrity, dest, opts) - return { - metadata: entry.metadata, - size: entry.size, - integrity: entry.integrity, - } -} - -module.exports.copy = copy - -async function copyByDigest (cache, key, dest, opts = {}) { - await read.copy(cache, key, dest, opts) - return key -} - -module.exports.copy.byDigest = copyByDigest - -module.exports.hasContent = read.hasContent diff --git a/node_modules/tuf-js/node_modules/cacache/lib/index.js b/node_modules/tuf-js/node_modules/cacache/lib/index.js deleted file mode 100644 index c9b0da5f3a271..0000000000000 --- a/node_modules/tuf-js/node_modules/cacache/lib/index.js +++ /dev/null @@ -1,42 +0,0 @@ -'use strict' - -const get = require('./get.js') -const put = require('./put.js') -const rm = require('./rm.js') -const verify = require('./verify.js') -const { clearMemoized } = require('./memoization.js') -const tmp = require('./util/tmp.js') -const index = require('./entry-index.js') - -module.exports.index = {} -module.exports.index.compact = index.compact -module.exports.index.insert = index.insert - -module.exports.ls = index.ls -module.exports.ls.stream = index.lsStream - -module.exports.get = get -module.exports.get.byDigest = get.byDigest -module.exports.get.stream = get.stream -module.exports.get.stream.byDigest = get.stream.byDigest -module.exports.get.copy = get.copy 
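(Aside, not part of the diff: the `index.js` being deleted above wires the whole vendored `cacache` surface together. A minimal sketch of how that surface is typically consumed, assuming a standalone `cacache` install; the cache path and key are hypothetical and error handling is omitted:)

```js
const cacache = require('cacache')

const cachePath = '/tmp/example-cache' // hypothetical location
const key = 'registry.npmjs.org|some-package' // hypothetical key

async function demo () {
  // put() writes the content, appends an index entry, and resolves to the
  // subresource-integrity string for the stored data
  const integrity = await cacache.put(cachePath, key, Buffer.from('hello'))

  // get() finds the newest index entry for the key, then reads the content
  const { data, size } = await cacache.get(cachePath, key)

  // content can also be read directly by digest, bypassing the index
  const sameData = await cacache.get.byDigest(cachePath, integrity)
  return { data, size, sameData }
}
```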
-module.exports.get.copy.byDigest = get.copy.byDigest -module.exports.get.info = get.info -module.exports.get.hasContent = get.hasContent - -module.exports.put = put -module.exports.put.stream = put.stream - -module.exports.rm = rm.entry -module.exports.rm.all = rm.all -module.exports.rm.entry = module.exports.rm -module.exports.rm.content = rm.content - -module.exports.clearMemoized = clearMemoized - -module.exports.tmp = {} -module.exports.tmp.mkdir = tmp.mkdir -module.exports.tmp.withTmp = tmp.withTmp - -module.exports.verify = verify -module.exports.verify.lastRun = verify.lastRun diff --git a/node_modules/tuf-js/node_modules/cacache/lib/memoization.js b/node_modules/tuf-js/node_modules/cacache/lib/memoization.js deleted file mode 100644 index 0ff604a479c9c..0000000000000 --- a/node_modules/tuf-js/node_modules/cacache/lib/memoization.js +++ /dev/null @@ -1,72 +0,0 @@ -'use strict' - -const LRU = require('lru-cache') - -const MEMOIZED = new LRU({ - max: 500, - maxSize: 50 * 1024 * 1024, // 50MB - ttl: 3 * 60 * 1000, // 3 minutes - sizeCalculation: (entry, key) => key.startsWith('key:') ? entry.data.length : entry.length, -}) - -module.exports.clearMemoized = clearMemoized - -function clearMemoized () { - const old = {} - MEMOIZED.forEach((v, k) => { - old[k] = v - }) - MEMOIZED.clear() - return old -} - -module.exports.put = put - -function put (cache, entry, data, opts) { - pickMem(opts).set(`key:${cache}:${entry.key}`, { entry, data }) - putDigest(cache, entry.integrity, data, opts) -} - -module.exports.put.byDigest = putDigest - -function putDigest (cache, integrity, data, opts) { - pickMem(opts).set(`digest:${cache}:${integrity}`, data) -} - -module.exports.get = get - -function get (cache, key, opts) { - return pickMem(opts).get(`key:${cache}:${key}`) -} - -module.exports.get.byDigest = getDigest - -function getDigest (cache, integrity, opts) { - return pickMem(opts).get(`digest:${cache}:${integrity}`) -} - -class ObjProxy { - constructor (obj) { - this.obj = obj - } - - get (key) { - return this.obj[key] - } - - set (key, val) { - this.obj[key] = val - } -} - -function pickMem (opts) { - if (!opts || !opts.memoize) { - return MEMOIZED - } else if (opts.memoize.get && opts.memoize.set) { - return opts.memoize - } else if (typeof opts.memoize === 'object') { - return new ObjProxy(opts.memoize) - } else { - return MEMOIZED - } -} diff --git a/node_modules/tuf-js/node_modules/cacache/lib/put.js b/node_modules/tuf-js/node_modules/cacache/lib/put.js deleted file mode 100644 index 9fc932d5f6dec..0000000000000 --- a/node_modules/tuf-js/node_modules/cacache/lib/put.js +++ /dev/null @@ -1,80 +0,0 @@ -'use strict' - -const index = require('./entry-index') -const memo = require('./memoization') -const write = require('./content/write') -const Flush = require('minipass-flush') -const { PassThrough } = require('minipass-collect') -const Pipeline = require('minipass-pipeline') - -const putOpts = (opts) => ({ - algorithms: ['sha512'], - ...opts, -}) - -module.exports = putData - -async function putData (cache, key, data, opts = {}) { - const { memoize } = opts - opts = putOpts(opts) - const res = await write(cache, data, opts) - const entry = await index.insert(cache, key, res.integrity, { ...opts, size: res.size }) - if (memoize) { - memo.put(cache, entry, data, opts) - } - - return res.integrity -} - -module.exports.stream = putStream - -function putStream (cache, key, opts = {}) { - const { memoize } = opts - opts = putOpts(opts) - let integrity - let size - let error - - let memoData - const 
pipeline = new Pipeline() - // first item in the pipeline is the memoizer, because we need - // that to end first and get the collected data. - if (memoize) { - const memoizer = new PassThrough().on('collect', data => { - memoData = data - }) - pipeline.push(memoizer) - } - - // contentStream is a write-only, not a passthrough - // no data comes out of it. - const contentStream = write.stream(cache, opts) - .on('integrity', (int) => { - integrity = int - }) - .on('size', (s) => { - size = s - }) - .on('error', (err) => { - error = err - }) - - pipeline.push(contentStream) - - // last but not least, we write the index and emit hash and size, - // and memoize if we're doing that - pipeline.push(new Flush({ - async flush () { - if (!error) { - const entry = await index.insert(cache, key, integrity, { ...opts, size }) - if (memoize && memoData) { - memo.put(cache, entry, memoData, opts) - } - pipeline.emit('integrity', integrity) - pipeline.emit('size', size) - } - }, - })) - - return pipeline -} diff --git a/node_modules/tuf-js/node_modules/cacache/lib/rm.js b/node_modules/tuf-js/node_modules/cacache/lib/rm.js deleted file mode 100644 index a94760c7cf243..0000000000000 --- a/node_modules/tuf-js/node_modules/cacache/lib/rm.js +++ /dev/null @@ -1,31 +0,0 @@ -'use strict' - -const { rm } = require('fs/promises') -const glob = require('./util/glob.js') -const index = require('./entry-index') -const memo = require('./memoization') -const path = require('path') -const rmContent = require('./content/rm') - -module.exports = entry -module.exports.entry = entry - -function entry (cache, key, opts) { - memo.clearMemoized() - return index.delete(cache, key, opts) -} - -module.exports.content = content - -function content (cache, integrity) { - memo.clearMemoized() - return rmContent(cache, integrity) -} - -module.exports.all = all - -async function all (cache) { - memo.clearMemoized() - const paths = await glob(path.join(cache, '*(content-*|index-*)'), { silent: true, nosort: true }) - return Promise.all(paths.map((p) => rm(p, { recursive: true, force: true }))) -} diff --git a/node_modules/tuf-js/node_modules/cacache/lib/util/glob.js b/node_modules/tuf-js/node_modules/cacache/lib/util/glob.js deleted file mode 100644 index 8500c1c16a429..0000000000000 --- a/node_modules/tuf-js/node_modules/cacache/lib/util/glob.js +++ /dev/null @@ -1,7 +0,0 @@ -'use strict' - -const { glob } = require('glob') -const path = require('path') - -const globify = (pattern) => pattern.split(path.win32.sep).join(path.posix.sep) -module.exports = (path, options) => glob(globify(path), options) diff --git a/node_modules/tuf-js/node_modules/cacache/lib/util/hash-to-segments.js b/node_modules/tuf-js/node_modules/cacache/lib/util/hash-to-segments.js deleted file mode 100644 index 445599b503808..0000000000000 --- a/node_modules/tuf-js/node_modules/cacache/lib/util/hash-to-segments.js +++ /dev/null @@ -1,7 +0,0 @@ -'use strict' - -module.exports = hashToSegments - -function hashToSegments (hash) { - return [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)] -} diff --git a/node_modules/tuf-js/node_modules/cacache/lib/util/tmp.js b/node_modules/tuf-js/node_modules/cacache/lib/util/tmp.js deleted file mode 100644 index 0bf5302136ebe..0000000000000 --- a/node_modules/tuf-js/node_modules/cacache/lib/util/tmp.js +++ /dev/null @@ -1,26 +0,0 @@ -'use strict' - -const { withTempDir } = require('@npmcli/fs') -const fs = require('fs/promises') -const path = require('path') - -module.exports.mkdir = mktmpdir - -async function mktmpdir (cache, 
opts = {}) { - const { tmpPrefix } = opts - const tmpDir = path.join(cache, 'tmp') - await fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' }) - // do not use path.join(), it drops the trailing / if tmpPrefix is unset - const target = `${tmpDir}${path.sep}${tmpPrefix || ''}` - return fs.mkdtemp(target, { owner: 'inherit' }) -} - -module.exports.withTmp = withTmp - -function withTmp (cache, opts, cb) { - if (!cb) { - cb = opts - opts = {} - } - return withTempDir(path.join(cache, 'tmp'), cb, opts) -} diff --git a/node_modules/tuf-js/node_modules/cacache/lib/verify.js b/node_modules/tuf-js/node_modules/cacache/lib/verify.js deleted file mode 100644 index 62e85c946490f..0000000000000 --- a/node_modules/tuf-js/node_modules/cacache/lib/verify.js +++ /dev/null @@ -1,257 +0,0 @@ -'use strict' - -const { - mkdir, - readFile, - rm, - stat, - truncate, - writeFile, -} = require('fs/promises') -const pMap = require('p-map') -const contentPath = require('./content/path') -const fsm = require('fs-minipass') -const glob = require('./util/glob.js') -const index = require('./entry-index') -const path = require('path') -const ssri = require('ssri') - -const hasOwnProperty = (obj, key) => - Object.prototype.hasOwnProperty.call(obj, key) - -const verifyOpts = (opts) => ({ - concurrency: 20, - log: { silly () {} }, - ...opts, -}) - -module.exports = verify - -async function verify (cache, opts) { - opts = verifyOpts(opts) - opts.log.silly('verify', 'verifying cache at', cache) - - const steps = [ - markStartTime, - fixPerms, - garbageCollect, - rebuildIndex, - cleanTmp, - writeVerifile, - markEndTime, - ] - - const stats = {} - for (const step of steps) { - const label = step.name - const start = new Date() - const s = await step(cache, opts) - if (s) { - Object.keys(s).forEach((k) => { - stats[k] = s[k] - }) - } - const end = new Date() - if (!stats.runTime) { - stats.runTime = {} - } - stats.runTime[label] = end - start - } - stats.runTime.total = stats.endTime - stats.startTime - opts.log.silly( - 'verify', - 'verification finished for', - cache, - 'in', - `${stats.runTime.total}ms` - ) - return stats -} - -async function markStartTime (cache, opts) { - return { startTime: new Date() } -} - -async function markEndTime (cache, opts) { - return { endTime: new Date() } -} - -async function fixPerms (cache, opts) { - opts.log.silly('verify', 'fixing cache permissions') - await mkdir(cache, { recursive: true }) - return null -} - -// Implements a naive mark-and-sweep tracing garbage collector. -// -// The algorithm is basically as follows: -// 1. Read (and filter) all index entries ("pointers") -// 2. Mark each integrity value as "live" -// 3. Read entire filesystem tree in `content-vX/` dir -// 4. If content is live, verify its checksum and delete it if it fails -// 5. If content is not marked as live, rm it. 
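(Aside, not part of the diff: the five steps above are a classic two-phase mark-and-sweep pass. A stripped-down sketch using in-memory stand-ins for the index stream and content tree; every name here is hypothetical, and the checksum re-verification of live content is omitted:)

```js
// mark: record every integrity value that some index entry still points at
function mark (entries) {
  const live = new Set()
  for (const entry of entries) {
    if (entry.integrity) {
      live.add(entry.integrity)
    }
  }
  return live
}

// sweep: reclaim content files that no surviving entry references
async function sweep (live, contentFiles, rmFile) {
  const stats = { reclaimedCount: 0, keptCount: 0 }
  for (const file of contentFiles) {
    if (live.has(file.integrity)) {
      stats.keptCount++
    } else {
      await rmFile(file.path)
      stats.reclaimedCount++
    }
  }
  return stats
}
```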
-// -async function garbageCollect (cache, opts) { - opts.log.silly('verify', 'garbage collecting content') - const indexStream = index.lsStream(cache) - const liveContent = new Set() - indexStream.on('data', (entry) => { - if (opts.filter && !opts.filter(entry)) { - return - } - - // integrity is stringified, re-parse it so we can get each hash - const integrity = ssri.parse(entry.integrity) - for (const algo in integrity) { - liveContent.add(integrity[algo].toString()) - } - }) - await new Promise((resolve, reject) => { - indexStream.on('end', resolve).on('error', reject) - }) - const contentDir = contentPath.contentDir(cache) - const files = await glob(path.join(contentDir, '**'), { - follow: false, - nodir: true, - nosort: true, - }) - const stats = { - verifiedContent: 0, - reclaimedCount: 0, - reclaimedSize: 0, - badContentCount: 0, - keptSize: 0, - } - await pMap( - files, - async (f) => { - const split = f.split(/[/\\]/) - const digest = split.slice(split.length - 3).join('') - const algo = split[split.length - 4] - const integrity = ssri.fromHex(digest, algo) - if (liveContent.has(integrity.toString())) { - const info = await verifyContent(f, integrity) - if (!info.valid) { - stats.reclaimedCount++ - stats.badContentCount++ - stats.reclaimedSize += info.size - } else { - stats.verifiedContent++ - stats.keptSize += info.size - } - } else { - // No entries refer to this content. We can delete. - stats.reclaimedCount++ - const s = await stat(f) - await rm(f, { recursive: true, force: true }) - stats.reclaimedSize += s.size - } - return stats - }, - { concurrency: opts.concurrency } - ) - return stats -} - -async function verifyContent (filepath, sri) { - const contentInfo = {} - try { - const { size } = await stat(filepath) - contentInfo.size = size - contentInfo.valid = true - await ssri.checkStream(new fsm.ReadStream(filepath), sri) - } catch (err) { - if (err.code === 'ENOENT') { - return { size: 0, valid: false } - } - if (err.code !== 'EINTEGRITY') { - throw err - } - - await rm(filepath, { recursive: true, force: true }) - contentInfo.valid = false - } - return contentInfo -} - -async function rebuildIndex (cache, opts) { - opts.log.silly('verify', 'rebuilding index') - const entries = await index.ls(cache) - const stats = { - missingContent: 0, - rejectedEntries: 0, - totalEntries: 0, - } - const buckets = {} - for (const k in entries) { - /* istanbul ignore else */ - if (hasOwnProperty(entries, k)) { - const hashed = index.hashKey(k) - const entry = entries[k] - const excluded = opts.filter && !opts.filter(entry) - excluded && stats.rejectedEntries++ - if (buckets[hashed] && !excluded) { - buckets[hashed].push(entry) - } else if (buckets[hashed] && excluded) { - // skip - } else if (excluded) { - buckets[hashed] = [] - buckets[hashed]._path = index.bucketPath(cache, k) - } else { - buckets[hashed] = [entry] - buckets[hashed]._path = index.bucketPath(cache, k) - } - } - } - await pMap( - Object.keys(buckets), - (key) => { - return rebuildBucket(cache, buckets[key], stats, opts) - }, - { concurrency: opts.concurrency } - ) - return stats -} - -async function rebuildBucket (cache, bucket, stats, opts) { - await truncate(bucket._path) - // This needs to be serialized because cacache explicitly - // lets very racy bucket conflicts clobber each other. 
- for (const entry of bucket) { - const content = contentPath(cache, entry.integrity) - try { - await stat(content) - await index.insert(cache, entry.key, entry.integrity, { - metadata: entry.metadata, - size: entry.size, - time: entry.time, - }) - stats.totalEntries++ - } catch (err) { - if (err.code === 'ENOENT') { - stats.rejectedEntries++ - stats.missingContent++ - } else { - throw err - } - } - } -} - -function cleanTmp (cache, opts) { - opts.log.silly('verify', 'cleaning tmp directory') - return rm(path.join(cache, 'tmp'), { recursive: true, force: true }) -} - -async function writeVerifile (cache, opts) { - const verifile = path.join(cache, '_lastverified') - opts.log.silly('verify', 'writing verifile to ' + verifile) - return writeFile(verifile, `${Date.now()}`) -} - -module.exports.lastRun = lastRun - -async function lastRun (cache) { - const data = await readFile(path.join(cache, '_lastverified'), { encoding: 'utf8' }) - return new Date(+data) -} diff --git a/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/LICENSE b/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/LICENSE deleted file mode 100644 index 97f8e32ed82e4..0000000000000 --- a/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright (c) 2017-2023 npm, Inc., Isaac Z. Schlueter, and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/cjs/index.js b/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/cjs/index.js deleted file mode 100644 index b6cdae8eb514b..0000000000000 --- a/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/cjs/index.js +++ /dev/null @@ -1,1028 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Minipass = exports.isWritable = exports.isReadable = exports.isStream = void 0; -const proc = typeof process === 'object' && process - ? process - : { - stdout: null, - stderr: null, - }; -const events_1 = require("events"); -const stream_1 = __importDefault(require("stream")); -const string_decoder_1 = require("string_decoder"); -/** - * Return true if the argument is a Minipass stream, Node stream, or something - * else that Minipass can interact with. 
- */ -const isStream = (s) => !!s && - typeof s === 'object' && - (s instanceof Minipass || - s instanceof stream_1.default || - (0, exports.isReadable)(s) || - (0, exports.isWritable)(s)); -exports.isStream = isStream; -/** - * Return true if the argument is a valid {@link Minipass.Readable} - */ -const isReadable = (s) => !!s && - typeof s === 'object' && - s instanceof events_1.EventEmitter && - typeof s.pipe === 'function' && - // node core Writable streams have a pipe() method, but it throws - s.pipe !== stream_1.default.Writable.prototype.pipe; -exports.isReadable = isReadable; -/** - * Return true if the argument is a valid {@link Minipass.Writable} - */ -const isWritable = (s) => !!s && - typeof s === 'object' && - s instanceof events_1.EventEmitter && - typeof s.write === 'function' && - typeof s.end === 'function'; -exports.isWritable = isWritable; -const EOF = Symbol('EOF'); -const MAYBE_EMIT_END = Symbol('maybeEmitEnd'); -const EMITTED_END = Symbol('emittedEnd'); -const EMITTING_END = Symbol('emittingEnd'); -const EMITTED_ERROR = Symbol('emittedError'); -const CLOSED = Symbol('closed'); -const READ = Symbol('read'); -const FLUSH = Symbol('flush'); -const FLUSHCHUNK = Symbol('flushChunk'); -const ENCODING = Symbol('encoding'); -const DECODER = Symbol('decoder'); -const FLOWING = Symbol('flowing'); -const PAUSED = Symbol('paused'); -const RESUME = Symbol('resume'); -const BUFFER = Symbol('buffer'); -const PIPES = Symbol('pipes'); -const BUFFERLENGTH = Symbol('bufferLength'); -const BUFFERPUSH = Symbol('bufferPush'); -const BUFFERSHIFT = Symbol('bufferShift'); -const OBJECTMODE = Symbol('objectMode'); -// internal event when stream is destroyed -const DESTROYED = Symbol('destroyed'); -// internal event when stream has an error -const ERROR = Symbol('error'); -const EMITDATA = Symbol('emitData'); -const EMITEND = Symbol('emitEnd'); -const EMITEND2 = Symbol('emitEnd2'); -const ASYNC = Symbol('async'); -const ABORT = Symbol('abort'); -const ABORTED = Symbol('aborted'); -const SIGNAL = Symbol('signal'); -const DATALISTENERS = Symbol('dataListeners'); -const DISCARDED = Symbol('discarded'); -const defer = (fn) => Promise.resolve().then(fn); -const nodefer = (fn) => fn(); -const isEndish = (ev) => ev === 'end' || ev === 'finish' || ev === 'prefinish'; -const isArrayBufferLike = (b) => b instanceof ArrayBuffer || - (!!b && - typeof b === 'object' && - b.constructor && - b.constructor.name === 'ArrayBuffer' && - b.byteLength >= 0); -const isArrayBufferView = (b) => !Buffer.isBuffer(b) && ArrayBuffer.isView(b); -/** - * Internal class representing a pipe to a destination stream. - * - * @internal - */ -class Pipe { - src; - dest; - opts; - ondrain; - constructor(src, dest, opts) { - this.src = src; - this.dest = dest; - this.opts = opts; - this.ondrain = () => src[RESUME](); - this.dest.on('drain', this.ondrain); - } - unpipe() { - this.dest.removeListener('drain', this.ondrain); - } - // only here for the prototype - /* c8 ignore start */ - proxyErrors(_er) { } - /* c8 ignore stop */ - end() { - this.unpipe(); - if (this.opts.end) - this.dest.end(); - } -} -/** - * Internal class representing a pipe to a destination stream where - * errors are proxied. 
- * - * @internal - */ -class PipeProxyErrors extends Pipe { - unpipe() { - this.src.removeListener('error', this.proxyErrors); - super.unpipe(); - } - constructor(src, dest, opts) { - super(src, dest, opts); - this.proxyErrors = er => dest.emit('error', er); - src.on('error', this.proxyErrors); - } -} -const isObjectModeOptions = (o) => !!o.objectMode; -const isEncodingOptions = (o) => !o.objectMode && !!o.encoding && o.encoding !== 'buffer'; -/** - * Main export, the Minipass class - * - * `RType` is the type of data emitted, defaults to Buffer - * - * `WType` is the type of data to be written, if RType is buffer or string, - * then any {@link Minipass.ContiguousData} is allowed. - * - * `Events` is the set of event handler signatures that this object - * will emit, see {@link Minipass.Events} - */ -class Minipass extends events_1.EventEmitter { - [FLOWING] = false; - [PAUSED] = false; - [PIPES] = []; - [BUFFER] = []; - [OBJECTMODE]; - [ENCODING]; - [ASYNC]; - [DECODER]; - [EOF] = false; - [EMITTED_END] = false; - [EMITTING_END] = false; - [CLOSED] = false; - [EMITTED_ERROR] = null; - [BUFFERLENGTH] = 0; - [DESTROYED] = false; - [SIGNAL]; - [ABORTED] = false; - [DATALISTENERS] = 0; - [DISCARDED] = false; - /** - * true if the stream can be written - */ - writable = true; - /** - * true if the stream can be read - */ - readable = true; - /** - * If `RType` is Buffer, then options do not need to be provided. - * Otherwise, an options object must be provided to specify either - * {@link Minipass.SharedOptions.objectMode} or - * {@link Minipass.SharedOptions.encoding}, as appropriate. - */ - constructor(...args) { - const options = (args[0] || - {}); - super(); - if (options.objectMode && typeof options.encoding === 'string') { - throw new TypeError('Encoding and objectMode may not be used together'); - } - if (isObjectModeOptions(options)) { - this[OBJECTMODE] = true; - this[ENCODING] = null; - } - else if (isEncodingOptions(options)) { - this[ENCODING] = options.encoding; - this[OBJECTMODE] = false; - } - else { - this[OBJECTMODE] = false; - this[ENCODING] = null; - } - this[ASYNC] = !!options.async; - this[DECODER] = this[ENCODING] - ? new string_decoder_1.StringDecoder(this[ENCODING]) - : null; - //@ts-ignore - private option for debugging and testing - if (options && options.debugExposeBuffer === true) { - Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] }); - } - //@ts-ignore - private option for debugging and testing - if (options && options.debugExposePipes === true) { - Object.defineProperty(this, 'pipes', { get: () => this[PIPES] }); - } - const { signal } = options; - if (signal) { - this[SIGNAL] = signal; - if (signal.aborted) { - this[ABORT](); - } - else { - signal.addEventListener('abort', () => this[ABORT]()); - } - } - } - /** - * The amount of data stored in the buffer waiting to be read. - * - * For Buffer strings, this will be the total byte length. - * For string encoding streams, this will be the string character length, - * according to JavaScript's `string.length` logic. - * For objectMode streams, this is a count of the items waiting to be - * emitted. 
- */ - get bufferLength() { - return this[BUFFERLENGTH]; - } - /** - * The `BufferEncoding` currently in use, or `null` - */ - get encoding() { - return this[ENCODING]; - } - /** - * @deprecated - This is a read only property - */ - set encoding(_enc) { - throw new Error('Encoding must be set at instantiation time'); - } - /** - * @deprecated - Encoding may only be set at instantiation time - */ - setEncoding(_enc) { - throw new Error('Encoding must be set at instantiation time'); - } - /** - * True if this is an objectMode stream - */ - get objectMode() { - return this[OBJECTMODE]; - } - /** - * @deprecated - This is a read-only property - */ - set objectMode(_om) { - throw new Error('objectMode must be set at instantiation time'); - } - /** - * true if this is an async stream - */ - get ['async']() { - return this[ASYNC]; - } - /** - * Set to true to make this stream async. - * - * Once set, it cannot be unset, as this would potentially cause incorrect - * behavior. Ie, a sync stream can be made async, but an async stream - * cannot be safely made sync. - */ - set ['async'](a) { - this[ASYNC] = this[ASYNC] || !!a; - } - // drop everything and get out of the flow completely - [ABORT]() { - this[ABORTED] = true; - this.emit('abort', this[SIGNAL]?.reason); - this.destroy(this[SIGNAL]?.reason); - } - /** - * True if the stream has been aborted. - */ - get aborted() { - return this[ABORTED]; - } - /** - * No-op setter. Stream aborted status is set via the AbortSignal provided - * in the constructor options. - */ - set aborted(_) { } - write(chunk, encoding, cb) { - if (this[ABORTED]) - return false; - if (this[EOF]) - throw new Error('write after end'); - if (this[DESTROYED]) { - this.emit('error', Object.assign(new Error('Cannot call write after a stream was destroyed'), { code: 'ERR_STREAM_DESTROYED' })); - return true; - } - if (typeof encoding === 'function') { - cb = encoding; - encoding = 'utf8'; - } - if (!encoding) - encoding = 'utf8'; - const fn = this[ASYNC] ? defer : nodefer; - // convert array buffers and typed array views into buffers - // at some point in the future, we may want to do the opposite! - // leave strings and buffers as-is - // anything is only allowed if in object mode, so throw - if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) { - if (isArrayBufferView(chunk)) { - //@ts-ignore - sinful unsafe type changing - chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength); - } - else if (isArrayBufferLike(chunk)) { - //@ts-ignore - sinful unsafe type changing - chunk = Buffer.from(chunk); - } - else if (typeof chunk !== 'string') { - throw new Error('Non-contiguous data written to non-objectMode stream'); - } - } - // handle object mode up front, since it's simpler - // this yields better performance, fewer checks later. - if (this[OBJECTMODE]) { - // maybe impossible? 
- /* c8 ignore start */ - if (this[FLOWING] && this[BUFFERLENGTH] !== 0) - this[FLUSH](true); - /* c8 ignore stop */ - if (this[FLOWING]) - this.emit('data', chunk); - else - this[BUFFERPUSH](chunk); - if (this[BUFFERLENGTH] !== 0) - this.emit('readable'); - if (cb) - fn(cb); - return this[FLOWING]; - } - // at this point the chunk is a buffer or string - // don't buffer it up or send it to the decoder - if (!chunk.length) { - if (this[BUFFERLENGTH] !== 0) - this.emit('readable'); - if (cb) - fn(cb); - return this[FLOWING]; - } - // fast-path writing strings of same encoding to a stream with - // an empty buffer, skipping the buffer/decoder dance - if (typeof chunk === 'string' && - // unless it is a string already ready for us to use - !(encoding === this[ENCODING] && !this[DECODER]?.lastNeed)) { - //@ts-ignore - sinful unsafe type change - chunk = Buffer.from(chunk, encoding); - } - if (Buffer.isBuffer(chunk) && this[ENCODING]) { - //@ts-ignore - sinful unsafe type change - chunk = this[DECODER].write(chunk); - } - // Note: flushing CAN potentially switch us into not-flowing mode - if (this[FLOWING] && this[BUFFERLENGTH] !== 0) - this[FLUSH](true); - if (this[FLOWING]) - this.emit('data', chunk); - else - this[BUFFERPUSH](chunk); - if (this[BUFFERLENGTH] !== 0) - this.emit('readable'); - if (cb) - fn(cb); - return this[FLOWING]; - } - /** - * Low-level explicit read method. - * - * In objectMode, the argument is ignored, and one item is returned if - * available. - * - * `n` is the number of bytes (or in the case of encoding streams, - * characters) to consume. If `n` is not provided, then the entire buffer - * is returned, or `null` is returned if no data is available. - * - * If `n` is greater that the amount of data in the internal buffer, - * then `null` is returned. - */ - read(n) { - if (this[DESTROYED]) - return null; - this[DISCARDED] = false; - if (this[BUFFERLENGTH] === 0 || - n === 0 || - (n && n > this[BUFFERLENGTH])) { - this[MAYBE_EMIT_END](); - return null; - } - if (this[OBJECTMODE]) - n = null; - if (this[BUFFER].length > 1 && !this[OBJECTMODE]) { - // not object mode, so if we have an encoding, then RType is string - // otherwise, must be Buffer - this[BUFFER] = [ - (this[ENCODING] - ? this[BUFFER].join('') - : Buffer.concat(this[BUFFER], this[BUFFERLENGTH])), - ]; - } - const ret = this[READ](n || null, this[BUFFER][0]); - this[MAYBE_EMIT_END](); - return ret; - } - [READ](n, chunk) { - if (this[OBJECTMODE]) - this[BUFFERSHIFT](); - else { - const c = chunk; - if (n === c.length || n === null) - this[BUFFERSHIFT](); - else if (typeof c === 'string') { - this[BUFFER][0] = c.slice(n); - chunk = c.slice(0, n); - this[BUFFERLENGTH] -= n; - } - else { - this[BUFFER][0] = c.subarray(n); - chunk = c.subarray(0, n); - this[BUFFERLENGTH] -= n; - } - } - this.emit('data', chunk); - if (!this[BUFFER].length && !this[EOF]) - this.emit('drain'); - return chunk; - } - end(chunk, encoding, cb) { - if (typeof chunk === 'function') { - cb = chunk; - chunk = undefined; - } - if (typeof encoding === 'function') { - cb = encoding; - encoding = 'utf8'; - } - if (chunk !== undefined) - this.write(chunk, encoding); - if (cb) - this.once('end', cb); - this[EOF] = true; - this.writable = false; - // if we haven't written anything, then go ahead and emit, - // even if we're not reading. - // we'll re-emit if a new 'end' listener is added anyway. - // This makes MP more suitable to write-only use cases. 
- if (this[FLOWING] || !this[PAUSED]) - this[MAYBE_EMIT_END](); - return this; - } - // don't let the internal resume be overwritten - [RESUME]() { - if (this[DESTROYED]) - return; - if (!this[DATALISTENERS] && !this[PIPES].length) { - this[DISCARDED] = true; - } - this[PAUSED] = false; - this[FLOWING] = true; - this.emit('resume'); - if (this[BUFFER].length) - this[FLUSH](); - else if (this[EOF]) - this[MAYBE_EMIT_END](); - else - this.emit('drain'); - } - /** - * Resume the stream if it is currently in a paused state - * - * If called when there are no pipe destinations or `data` event listeners, - * this will place the stream in a "discarded" state, where all data will - * be thrown away. The discarded state is removed if a pipe destination or - * data handler is added, if pause() is called, or if any synchronous or - * asynchronous iteration is started. - */ - resume() { - return this[RESUME](); - } - /** - * Pause the stream - */ - pause() { - this[FLOWING] = false; - this[PAUSED] = true; - this[DISCARDED] = false; - } - /** - * true if the stream has been forcibly destroyed - */ - get destroyed() { - return this[DESTROYED]; - } - /** - * true if the stream is currently in a flowing state, meaning that - * any writes will be immediately emitted. - */ - get flowing() { - return this[FLOWING]; - } - /** - * true if the stream is currently in a paused state - */ - get paused() { - return this[PAUSED]; - } - [BUFFERPUSH](chunk) { - if (this[OBJECTMODE]) - this[BUFFERLENGTH] += 1; - else - this[BUFFERLENGTH] += chunk.length; - this[BUFFER].push(chunk); - } - [BUFFERSHIFT]() { - if (this[OBJECTMODE]) - this[BUFFERLENGTH] -= 1; - else - this[BUFFERLENGTH] -= this[BUFFER][0].length; - return this[BUFFER].shift(); - } - [FLUSH](noDrain = false) { - do { } while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && - this[BUFFER].length); - if (!noDrain && !this[BUFFER].length && !this[EOF]) - this.emit('drain'); - } - [FLUSHCHUNK](chunk) { - this.emit('data', chunk); - return this[FLOWING]; - } - /** - * Pipe all data emitted by this stream into the destination provided. - * - * Triggers the flow of data. - */ - pipe(dest, opts) { - if (this[DESTROYED]) - return dest; - this[DISCARDED] = false; - const ended = this[EMITTED_END]; - opts = opts || {}; - if (dest === proc.stdout || dest === proc.stderr) - opts.end = false; - else - opts.end = opts.end !== false; - opts.proxyErrors = !!opts.proxyErrors; - // piping an ended stream ends immediately - if (ended) { - if (opts.end) - dest.end(); - } - else { - // "as" here just ignores the WType, which pipes don't care about, - // since they're only consuming from us, and writing to the dest - this[PIPES].push(!opts.proxyErrors - ? new Pipe(this, dest, opts) - : new PipeProxyErrors(this, dest, opts)); - if (this[ASYNC]) - defer(() => this[RESUME]()); - else - this[RESUME](); - } - return dest; - } - /** - * Fully unhook a piped destination stream. - * - * If the destination stream was the only consumer of this stream (ie, - * there are no other piped destinations or `'data'` event listeners) - * then the flow of data will stop until there is another consumer or - * {@link Minipass#resume} is explicitly called. 
- */ - unpipe(dest) { - const p = this[PIPES].find(p => p.dest === dest); - if (p) { - if (this[PIPES].length === 1) { - if (this[FLOWING] && this[DATALISTENERS] === 0) { - this[FLOWING] = false; - } - this[PIPES] = []; - } - else - this[PIPES].splice(this[PIPES].indexOf(p), 1); - p.unpipe(); - } - } - /** - * Alias for {@link Minipass#on} - */ - addListener(ev, handler) { - return this.on(ev, handler); - } - /** - * Mostly identical to `EventEmitter.on`, with the following - * behavior differences to prevent data loss and unnecessary hangs: - * - * - Adding a 'data' event handler will trigger the flow of data - * - * - Adding a 'readable' event handler when there is data waiting to be read - * will cause 'readable' to be emitted immediately. - * - * - Adding an 'endish' event handler ('end', 'finish', etc.) which has - * already passed will cause the event to be emitted immediately and all - * handlers removed. - * - * - Adding an 'error' event handler after an error has been emitted will - * cause the event to be re-emitted immediately with the error previously - * raised. - */ - on(ev, handler) { - const ret = super.on(ev, handler); - if (ev === 'data') { - this[DISCARDED] = false; - this[DATALISTENERS]++; - if (!this[PIPES].length && !this[FLOWING]) { - this[RESUME](); - } - } - else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) { - super.emit('readable'); - } - else if (isEndish(ev) && this[EMITTED_END]) { - super.emit(ev); - this.removeAllListeners(ev); - } - else if (ev === 'error' && this[EMITTED_ERROR]) { - const h = handler; - if (this[ASYNC]) - defer(() => h.call(this, this[EMITTED_ERROR])); - else - h.call(this, this[EMITTED_ERROR]); - } - return ret; - } - /** - * Alias for {@link Minipass#off} - */ - removeListener(ev, handler) { - return this.off(ev, handler); - } - /** - * Mostly identical to `EventEmitter.off` - * - * If a 'data' event handler is removed, and it was the last consumer - * (ie, there are no pipe destinations or other 'data' event listeners), - * then the flow of data will stop until there is another consumer or - * {@link Minipass#resume} is explicitly called. - */ - off(ev, handler) { - const ret = super.off(ev, handler); - // if we previously had listeners, and now we don't, and we don't - // have any pipes, then stop the flow, unless it's been explicitly - // put in a discarded flowing state via stream.resume(). - if (ev === 'data') { - this[DATALISTENERS] = this.listeners('data').length; - if (this[DATALISTENERS] === 0 && - !this[DISCARDED] && - !this[PIPES].length) { - this[FLOWING] = false; - } - } - return ret; - } - /** - * Mostly identical to `EventEmitter.removeAllListeners` - * - * If all 'data' event handlers are removed, and they were the last consumer - * (ie, there are no pipe destinations), then the flow of data will stop - * until there is another consumer or {@link Minipass#resume} is explicitly - * called. 
- */ - removeAllListeners(ev) { - const ret = super.removeAllListeners(ev); - if (ev === 'data' || ev === undefined) { - this[DATALISTENERS] = 0; - if (!this[DISCARDED] && !this[PIPES].length) { - this[FLOWING] = false; - } - } - return ret; - } - /** - * true if the 'end' event has been emitted - */ - get emittedEnd() { - return this[EMITTED_END]; - } - [MAYBE_EMIT_END]() { - if (!this[EMITTING_END] && - !this[EMITTED_END] && - !this[DESTROYED] && - this[BUFFER].length === 0 && - this[EOF]) { - this[EMITTING_END] = true; - this.emit('end'); - this.emit('prefinish'); - this.emit('finish'); - if (this[CLOSED]) - this.emit('close'); - this[EMITTING_END] = false; - } - } - /** - * Mostly identical to `EventEmitter.emit`, with the following - * behavior differences to prevent data loss and unnecessary hangs: - * - * If the stream has been destroyed, and the event is something other - * than 'close' or 'error', then `false` is returned and no handlers - * are called. - * - * If the event is 'end', and has already been emitted, then the event - * is ignored. If the stream is in a paused or non-flowing state, then - * the event will be deferred until data flow resumes. If the stream is - * async, then handlers will be called on the next tick rather than - * immediately. - * - * If the event is 'close', and 'end' has not yet been emitted, then - * the event will be deferred until after 'end' is emitted. - * - * If the event is 'error', and an AbortSignal was provided for the stream, - * and there are no listeners, then the event is ignored, matching the - * behavior of node core streams in the presense of an AbortSignal. - * - * If the event is 'finish' or 'prefinish', then all listeners will be - * removed after emitting the event, to prevent double-firing. - */ - emit(ev, ...args) { - const data = args[0]; - // error and close are only events allowed after calling destroy() - if (ev !== 'error' && - ev !== 'close' && - ev !== DESTROYED && - this[DESTROYED]) { - return false; - } - else if (ev === 'data') { - return !this[OBJECTMODE] && !data - ? false - : this[ASYNC] - ? (defer(() => this[EMITDATA](data)), true) - : this[EMITDATA](data); - } - else if (ev === 'end') { - return this[EMITEND](); - } - else if (ev === 'close') { - this[CLOSED] = true; - // don't emit close before 'end' and 'finish' - if (!this[EMITTED_END] && !this[DESTROYED]) - return false; - const ret = super.emit('close'); - this.removeAllListeners('close'); - return ret; - } - else if (ev === 'error') { - this[EMITTED_ERROR] = data; - super.emit(ERROR, data); - const ret = !this[SIGNAL] || this.listeners('error').length - ? super.emit('error', data) - : false; - this[MAYBE_EMIT_END](); - return ret; - } - else if (ev === 'resume') { - const ret = super.emit('resume'); - this[MAYBE_EMIT_END](); - return ret; - } - else if (ev === 'finish' || ev === 'prefinish') { - const ret = super.emit(ev); - this.removeAllListeners(ev); - return ret; - } - // Some other unknown event - const ret = super.emit(ev, ...args); - this[MAYBE_EMIT_END](); - return ret; - } - [EMITDATA](data) { - for (const p of this[PIPES]) { - if (p.dest.write(data) === false) - this.pause(); - } - const ret = this[DISCARDED] ? false : super.emit('data', data); - this[MAYBE_EMIT_END](); - return ret; - } - [EMITEND]() { - if (this[EMITTED_END]) - return false; - this[EMITTED_END] = true; - this.readable = false; - return this[ASYNC] - ? 
(defer(() => this[EMITEND2]()), true) - : this[EMITEND2](); - } - [EMITEND2]() { - if (this[DECODER]) { - const data = this[DECODER].end(); - if (data) { - for (const p of this[PIPES]) { - p.dest.write(data); - } - if (!this[DISCARDED]) - super.emit('data', data); - } - } - for (const p of this[PIPES]) { - p.end(); - } - const ret = super.emit('end'); - this.removeAllListeners('end'); - return ret; - } - /** - * Return a Promise that resolves to an array of all emitted data once - * the stream ends. - */ - async collect() { - const buf = Object.assign([], { - dataLength: 0, - }); - if (!this[OBJECTMODE]) - buf.dataLength = 0; - // set the promise first, in case an error is raised - // by triggering the flow here. - const p = this.promise(); - this.on('data', c => { - buf.push(c); - if (!this[OBJECTMODE]) - buf.dataLength += c.length; - }); - await p; - return buf; - } - /** - * Return a Promise that resolves to the concatenation of all emitted data - * once the stream ends. - * - * Not allowed on objectMode streams. - */ - async concat() { - if (this[OBJECTMODE]) { - throw new Error('cannot concat in objectMode'); - } - const buf = await this.collect(); - return (this[ENCODING] - ? buf.join('') - : Buffer.concat(buf, buf.dataLength)); - } - /** - * Return a void Promise that resolves once the stream ends. - */ - async promise() { - return new Promise((resolve, reject) => { - this.on(DESTROYED, () => reject(new Error('stream destroyed'))); - this.on('error', er => reject(er)); - this.on('end', () => resolve()); - }); - } - /** - * Asynchronous `for await of` iteration. - * - * This will continue emitting all chunks until the stream terminates. - */ - [Symbol.asyncIterator]() { - // set this up front, in case the consumer doesn't call next() - // right away. - this[DISCARDED] = false; - let stopped = false; - const stop = async () => { - this.pause(); - stopped = true; - return { value: undefined, done: true }; - }; - const next = () => { - if (stopped) - return stop(); - const res = this.read(); - if (res !== null) - return Promise.resolve({ done: false, value: res }); - if (this[EOF]) - return stop(); - let resolve; - let reject; - const onerr = (er) => { - this.off('data', ondata); - this.off('end', onend); - this.off(DESTROYED, ondestroy); - stop(); - reject(er); - }; - const ondata = (value) => { - this.off('error', onerr); - this.off('end', onend); - this.off(DESTROYED, ondestroy); - this.pause(); - resolve({ value, done: !!this[EOF] }); - }; - const onend = () => { - this.off('error', onerr); - this.off('data', ondata); - this.off(DESTROYED, ondestroy); - stop(); - resolve({ done: true, value: undefined }); - }; - const ondestroy = () => onerr(new Error('stream destroyed')); - return new Promise((res, rej) => { - reject = rej; - resolve = res; - this.once(DESTROYED, ondestroy); - this.once('error', onerr); - this.once('end', onend); - this.once('data', ondata); - }); - }; - return { - next, - throw: stop, - return: stop, - [Symbol.asyncIterator]() { - return this; - }, - }; - } - /** - * Synchronous `for of` iteration. - * - * The iteration will terminate when the internal buffer runs out, even - * if the stream has not yet terminated. - */ - [Symbol.iterator]() { - // set this up front, in case the consumer doesn't call next() - // right away. 
- this[DISCARDED] = false; - let stopped = false; - const stop = () => { - this.pause(); - this.off(ERROR, stop); - this.off(DESTROYED, stop); - this.off('end', stop); - stopped = true; - return { done: true, value: undefined }; - }; - const next = () => { - if (stopped) - return stop(); - const value = this.read(); - return value === null ? stop() : { done: false, value }; - }; - this.once('end', stop); - this.once(ERROR, stop); - this.once(DESTROYED, stop); - return { - next, - throw: stop, - return: stop, - [Symbol.iterator]() { - return this; - }, - }; - } - /** - * Destroy a stream, preventing it from being used for any further purpose. - * - * If the stream has a `close()` method, then it will be called on - * destruction. - * - * After destruction, any attempt to write data, read data, or emit most - * events will be ignored. - * - * If an error argument is provided, then it will be emitted in an - * 'error' event. - */ - destroy(er) { - if (this[DESTROYED]) { - if (er) - this.emit('error', er); - else - this.emit(DESTROYED); - return this; - } - this[DESTROYED] = true; - this[DISCARDED] = true; - // throw away all buffered data, it's never coming out - this[BUFFER].length = 0; - this[BUFFERLENGTH] = 0; - const wc = this; - if (typeof wc.close === 'function' && !this[CLOSED]) - wc.close(); - if (er) - this.emit('error', er); - // if no error to emit, still reject pending promises - else - this.emit(DESTROYED); - return this; - } - /** - * Alias for {@link isStream} - * - * Former export location, maintained for backwards compatibility. - * - * @deprecated - */ - static get isStream() { - return exports.isStream; - } -} -exports.Minipass = Minipass; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/cjs/package.json b/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/cjs/package.json deleted file mode 100644 index 5bbefffbabee3..0000000000000 --- a/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/cjs/package.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "type": "commonjs" -} diff --git a/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/mjs/index.js b/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/mjs/index.js deleted file mode 100644 index b65fafbae43a4..0000000000000 --- a/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/mjs/index.js +++ /dev/null @@ -1,1018 +0,0 @@ -const proc = typeof process === 'object' && process - ? process - : { - stdout: null, - stderr: null, - }; -import { EventEmitter } from 'events'; -import Stream from 'stream'; -import { StringDecoder } from 'string_decoder'; -/** - * Return true if the argument is a Minipass stream, Node stream, or something - * else that Minipass can interact with. 
- */ -export const isStream = (s) => !!s && - typeof s === 'object' && - (s instanceof Minipass || - s instanceof Stream || - isReadable(s) || - isWritable(s)); -/** - * Return true if the argument is a valid {@link Minipass.Readable} - */ -export const isReadable = (s) => !!s && - typeof s === 'object' && - s instanceof EventEmitter && - typeof s.pipe === 'function' && - // node core Writable streams have a pipe() method, but it throws - s.pipe !== Stream.Writable.prototype.pipe; -/** - * Return true if the argument is a valid {@link Minipass.Writable} - */ -export const isWritable = (s) => !!s && - typeof s === 'object' && - s instanceof EventEmitter && - typeof s.write === 'function' && - typeof s.end === 'function'; -const EOF = Symbol('EOF'); -const MAYBE_EMIT_END = Symbol('maybeEmitEnd'); -const EMITTED_END = Symbol('emittedEnd'); -const EMITTING_END = Symbol('emittingEnd'); -const EMITTED_ERROR = Symbol('emittedError'); -const CLOSED = Symbol('closed'); -const READ = Symbol('read'); -const FLUSH = Symbol('flush'); -const FLUSHCHUNK = Symbol('flushChunk'); -const ENCODING = Symbol('encoding'); -const DECODER = Symbol('decoder'); -const FLOWING = Symbol('flowing'); -const PAUSED = Symbol('paused'); -const RESUME = Symbol('resume'); -const BUFFER = Symbol('buffer'); -const PIPES = Symbol('pipes'); -const BUFFERLENGTH = Symbol('bufferLength'); -const BUFFERPUSH = Symbol('bufferPush'); -const BUFFERSHIFT = Symbol('bufferShift'); -const OBJECTMODE = Symbol('objectMode'); -// internal event when stream is destroyed -const DESTROYED = Symbol('destroyed'); -// internal event when stream has an error -const ERROR = Symbol('error'); -const EMITDATA = Symbol('emitData'); -const EMITEND = Symbol('emitEnd'); -const EMITEND2 = Symbol('emitEnd2'); -const ASYNC = Symbol('async'); -const ABORT = Symbol('abort'); -const ABORTED = Symbol('aborted'); -const SIGNAL = Symbol('signal'); -const DATALISTENERS = Symbol('dataListeners'); -const DISCARDED = Symbol('discarded'); -const defer = (fn) => Promise.resolve().then(fn); -const nodefer = (fn) => fn(); -const isEndish = (ev) => ev === 'end' || ev === 'finish' || ev === 'prefinish'; -const isArrayBufferLike = (b) => b instanceof ArrayBuffer || - (!!b && - typeof b === 'object' && - b.constructor && - b.constructor.name === 'ArrayBuffer' && - b.byteLength >= 0); -const isArrayBufferView = (b) => !Buffer.isBuffer(b) && ArrayBuffer.isView(b); -/** - * Internal class representing a pipe to a destination stream. - * - * @internal - */ -class Pipe { - src; - dest; - opts; - ondrain; - constructor(src, dest, opts) { - this.src = src; - this.dest = dest; - this.opts = opts; - this.ondrain = () => src[RESUME](); - this.dest.on('drain', this.ondrain); - } - unpipe() { - this.dest.removeListener('drain', this.ondrain); - } - // only here for the prototype - /* c8 ignore start */ - proxyErrors(_er) { } - /* c8 ignore stop */ - end() { - this.unpipe(); - if (this.opts.end) - this.dest.end(); - } -} -/** - * Internal class representing a pipe to a destination stream where - * errors are proxied. 
- * - * @internal - */ -class PipeProxyErrors extends Pipe { - unpipe() { - this.src.removeListener('error', this.proxyErrors); - super.unpipe(); - } - constructor(src, dest, opts) { - super(src, dest, opts); - this.proxyErrors = er => dest.emit('error', er); - src.on('error', this.proxyErrors); - } -} -const isObjectModeOptions = (o) => !!o.objectMode; -const isEncodingOptions = (o) => !o.objectMode && !!o.encoding && o.encoding !== 'buffer'; -/** - * Main export, the Minipass class - * - * `RType` is the type of data emitted, defaults to Buffer - * - * `WType` is the type of data to be written, if RType is buffer or string, - * then any {@link Minipass.ContiguousData} is allowed. - * - * `Events` is the set of event handler signatures that this object - * will emit, see {@link Minipass.Events} - */ -export class Minipass extends EventEmitter { - [FLOWING] = false; - [PAUSED] = false; - [PIPES] = []; - [BUFFER] = []; - [OBJECTMODE]; - [ENCODING]; - [ASYNC]; - [DECODER]; - [EOF] = false; - [EMITTED_END] = false; - [EMITTING_END] = false; - [CLOSED] = false; - [EMITTED_ERROR] = null; - [BUFFERLENGTH] = 0; - [DESTROYED] = false; - [SIGNAL]; - [ABORTED] = false; - [DATALISTENERS] = 0; - [DISCARDED] = false; - /** - * true if the stream can be written - */ - writable = true; - /** - * true if the stream can be read - */ - readable = true; - /** - * If `RType` is Buffer, then options do not need to be provided. - * Otherwise, an options object must be provided to specify either - * {@link Minipass.SharedOptions.objectMode} or - * {@link Minipass.SharedOptions.encoding}, as appropriate. - */ - constructor(...args) { - const options = (args[0] || - {}); - super(); - if (options.objectMode && typeof options.encoding === 'string') { - throw new TypeError('Encoding and objectMode may not be used together'); - } - if (isObjectModeOptions(options)) { - this[OBJECTMODE] = true; - this[ENCODING] = null; - } - else if (isEncodingOptions(options)) { - this[ENCODING] = options.encoding; - this[OBJECTMODE] = false; - } - else { - this[OBJECTMODE] = false; - this[ENCODING] = null; - } - this[ASYNC] = !!options.async; - this[DECODER] = this[ENCODING] - ? new StringDecoder(this[ENCODING]) - : null; - //@ts-ignore - private option for debugging and testing - if (options && options.debugExposeBuffer === true) { - Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] }); - } - //@ts-ignore - private option for debugging and testing - if (options && options.debugExposePipes === true) { - Object.defineProperty(this, 'pipes', { get: () => this[PIPES] }); - } - const { signal } = options; - if (signal) { - this[SIGNAL] = signal; - if (signal.aborted) { - this[ABORT](); - } - else { - signal.addEventListener('abort', () => this[ABORT]()); - } - } - } - /** - * The amount of data stored in the buffer waiting to be read. - * - * For Buffer strings, this will be the total byte length. - * For string encoding streams, this will be the string character length, - * according to JavaScript's `string.length` logic. - * For objectMode streams, this is a count of the items waiting to be - * emitted. 
- */ - get bufferLength() { - return this[BUFFERLENGTH]; - } - /** - * The `BufferEncoding` currently in use, or `null` - */ - get encoding() { - return this[ENCODING]; - } - /** - * @deprecated - This is a read only property - */ - set encoding(_enc) { - throw new Error('Encoding must be set at instantiation time'); - } - /** - * @deprecated - Encoding may only be set at instantiation time - */ - setEncoding(_enc) { - throw new Error('Encoding must be set at instantiation time'); - } - /** - * True if this is an objectMode stream - */ - get objectMode() { - return this[OBJECTMODE]; - } - /** - * @deprecated - This is a read-only property - */ - set objectMode(_om) { - throw new Error('objectMode must be set at instantiation time'); - } - /** - * true if this is an async stream - */ - get ['async']() { - return this[ASYNC]; - } - /** - * Set to true to make this stream async. - * - * Once set, it cannot be unset, as this would potentially cause incorrect - * behavior. Ie, a sync stream can be made async, but an async stream - * cannot be safely made sync. - */ - set ['async'](a) { - this[ASYNC] = this[ASYNC] || !!a; - } - // drop everything and get out of the flow completely - [ABORT]() { - this[ABORTED] = true; - this.emit('abort', this[SIGNAL]?.reason); - this.destroy(this[SIGNAL]?.reason); - } - /** - * True if the stream has been aborted. - */ - get aborted() { - return this[ABORTED]; - } - /** - * No-op setter. Stream aborted status is set via the AbortSignal provided - * in the constructor options. - */ - set aborted(_) { } - write(chunk, encoding, cb) { - if (this[ABORTED]) - return false; - if (this[EOF]) - throw new Error('write after end'); - if (this[DESTROYED]) { - this.emit('error', Object.assign(new Error('Cannot call write after a stream was destroyed'), { code: 'ERR_STREAM_DESTROYED' })); - return true; - } - if (typeof encoding === 'function') { - cb = encoding; - encoding = 'utf8'; - } - if (!encoding) - encoding = 'utf8'; - const fn = this[ASYNC] ? defer : nodefer; - // convert array buffers and typed array views into buffers - // at some point in the future, we may want to do the opposite! - // leave strings and buffers as-is - // anything is only allowed if in object mode, so throw - if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) { - if (isArrayBufferView(chunk)) { - //@ts-ignore - sinful unsafe type changing - chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength); - } - else if (isArrayBufferLike(chunk)) { - //@ts-ignore - sinful unsafe type changing - chunk = Buffer.from(chunk); - } - else if (typeof chunk !== 'string') { - throw new Error('Non-contiguous data written to non-objectMode stream'); - } - } - // handle object mode up front, since it's simpler - // this yields better performance, fewer checks later. - if (this[OBJECTMODE]) { - // maybe impossible? 
- /* c8 ignore start */ - if (this[FLOWING] && this[BUFFERLENGTH] !== 0) - this[FLUSH](true); - /* c8 ignore stop */ - if (this[FLOWING]) - this.emit('data', chunk); - else - this[BUFFERPUSH](chunk); - if (this[BUFFERLENGTH] !== 0) - this.emit('readable'); - if (cb) - fn(cb); - return this[FLOWING]; - } - // at this point the chunk is a buffer or string - // don't buffer it up or send it to the decoder - if (!chunk.length) { - if (this[BUFFERLENGTH] !== 0) - this.emit('readable'); - if (cb) - fn(cb); - return this[FLOWING]; - } - // fast-path writing strings of same encoding to a stream with - // an empty buffer, skipping the buffer/decoder dance - if (typeof chunk === 'string' && - // unless it is a string already ready for us to use - !(encoding === this[ENCODING] && !this[DECODER]?.lastNeed)) { - //@ts-ignore - sinful unsafe type change - chunk = Buffer.from(chunk, encoding); - } - if (Buffer.isBuffer(chunk) && this[ENCODING]) { - //@ts-ignore - sinful unsafe type change - chunk = this[DECODER].write(chunk); - } - // Note: flushing CAN potentially switch us into not-flowing mode - if (this[FLOWING] && this[BUFFERLENGTH] !== 0) - this[FLUSH](true); - if (this[FLOWING]) - this.emit('data', chunk); - else - this[BUFFERPUSH](chunk); - if (this[BUFFERLENGTH] !== 0) - this.emit('readable'); - if (cb) - fn(cb); - return this[FLOWING]; - } - /** - * Low-level explicit read method. - * - * In objectMode, the argument is ignored, and one item is returned if - * available. - * - * `n` is the number of bytes (or in the case of encoding streams, - * characters) to consume. If `n` is not provided, then the entire buffer - * is returned, or `null` is returned if no data is available. - * - * If `n` is greater that the amount of data in the internal buffer, - * then `null` is returned. - */ - read(n) { - if (this[DESTROYED]) - return null; - this[DISCARDED] = false; - if (this[BUFFERLENGTH] === 0 || - n === 0 || - (n && n > this[BUFFERLENGTH])) { - this[MAYBE_EMIT_END](); - return null; - } - if (this[OBJECTMODE]) - n = null; - if (this[BUFFER].length > 1 && !this[OBJECTMODE]) { - // not object mode, so if we have an encoding, then RType is string - // otherwise, must be Buffer - this[BUFFER] = [ - (this[ENCODING] - ? this[BUFFER].join('') - : Buffer.concat(this[BUFFER], this[BUFFERLENGTH])), - ]; - } - const ret = this[READ](n || null, this[BUFFER][0]); - this[MAYBE_EMIT_END](); - return ret; - } - [READ](n, chunk) { - if (this[OBJECTMODE]) - this[BUFFERSHIFT](); - else { - const c = chunk; - if (n === c.length || n === null) - this[BUFFERSHIFT](); - else if (typeof c === 'string') { - this[BUFFER][0] = c.slice(n); - chunk = c.slice(0, n); - this[BUFFERLENGTH] -= n; - } - else { - this[BUFFER][0] = c.subarray(n); - chunk = c.subarray(0, n); - this[BUFFERLENGTH] -= n; - } - } - this.emit('data', chunk); - if (!this[BUFFER].length && !this[EOF]) - this.emit('drain'); - return chunk; - } - end(chunk, encoding, cb) { - if (typeof chunk === 'function') { - cb = chunk; - chunk = undefined; - } - if (typeof encoding === 'function') { - cb = encoding; - encoding = 'utf8'; - } - if (chunk !== undefined) - this.write(chunk, encoding); - if (cb) - this.once('end', cb); - this[EOF] = true; - this.writable = false; - // if we haven't written anything, then go ahead and emit, - // even if we're not reading. - // we'll re-emit if a new 'end' listener is added anyway. - // This makes MP more suitable to write-only use cases. 
- if (this[FLOWING] || !this[PAUSED]) - this[MAYBE_EMIT_END](); - return this; - } - // don't let the internal resume be overwritten - [RESUME]() { - if (this[DESTROYED]) - return; - if (!this[DATALISTENERS] && !this[PIPES].length) { - this[DISCARDED] = true; - } - this[PAUSED] = false; - this[FLOWING] = true; - this.emit('resume'); - if (this[BUFFER].length) - this[FLUSH](); - else if (this[EOF]) - this[MAYBE_EMIT_END](); - else - this.emit('drain'); - } - /** - * Resume the stream if it is currently in a paused state - * - * If called when there are no pipe destinations or `data` event listeners, - * this will place the stream in a "discarded" state, where all data will - * be thrown away. The discarded state is removed if a pipe destination or - * data handler is added, if pause() is called, or if any synchronous or - * asynchronous iteration is started. - */ - resume() { - return this[RESUME](); - } - /** - * Pause the stream - */ - pause() { - this[FLOWING] = false; - this[PAUSED] = true; - this[DISCARDED] = false; - } - /** - * true if the stream has been forcibly destroyed - */ - get destroyed() { - return this[DESTROYED]; - } - /** - * true if the stream is currently in a flowing state, meaning that - * any writes will be immediately emitted. - */ - get flowing() { - return this[FLOWING]; - } - /** - * true if the stream is currently in a paused state - */ - get paused() { - return this[PAUSED]; - } - [BUFFERPUSH](chunk) { - if (this[OBJECTMODE]) - this[BUFFERLENGTH] += 1; - else - this[BUFFERLENGTH] += chunk.length; - this[BUFFER].push(chunk); - } - [BUFFERSHIFT]() { - if (this[OBJECTMODE]) - this[BUFFERLENGTH] -= 1; - else - this[BUFFERLENGTH] -= this[BUFFER][0].length; - return this[BUFFER].shift(); - } - [FLUSH](noDrain = false) { - do { } while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && - this[BUFFER].length); - if (!noDrain && !this[BUFFER].length && !this[EOF]) - this.emit('drain'); - } - [FLUSHCHUNK](chunk) { - this.emit('data', chunk); - return this[FLOWING]; - } - /** - * Pipe all data emitted by this stream into the destination provided. - * - * Triggers the flow of data. - */ - pipe(dest, opts) { - if (this[DESTROYED]) - return dest; - this[DISCARDED] = false; - const ended = this[EMITTED_END]; - opts = opts || {}; - if (dest === proc.stdout || dest === proc.stderr) - opts.end = false; - else - opts.end = opts.end !== false; - opts.proxyErrors = !!opts.proxyErrors; - // piping an ended stream ends immediately - if (ended) { - if (opts.end) - dest.end(); - } - else { - // "as" here just ignores the WType, which pipes don't care about, - // since they're only consuming from us, and writing to the dest - this[PIPES].push(!opts.proxyErrors - ? new Pipe(this, dest, opts) - : new PipeProxyErrors(this, dest, opts)); - if (this[ASYNC]) - defer(() => this[RESUME]()); - else - this[RESUME](); - } - return dest; - } - /** - * Fully unhook a piped destination stream. - * - * If the destination stream was the only consumer of this stream (ie, - * there are no other piped destinations or `'data'` event listeners) - * then the flow of data will stop until there is another consumer or - * {@link Minipass#resume} is explicitly called. 
- */ - unpipe(dest) { - const p = this[PIPES].find(p => p.dest === dest); - if (p) { - if (this[PIPES].length === 1) { - if (this[FLOWING] && this[DATALISTENERS] === 0) { - this[FLOWING] = false; - } - this[PIPES] = []; - } - else - this[PIPES].splice(this[PIPES].indexOf(p), 1); - p.unpipe(); - } - } - /** - * Alias for {@link Minipass#on} - */ - addListener(ev, handler) { - return this.on(ev, handler); - } - /** - * Mostly identical to `EventEmitter.on`, with the following - * behavior differences to prevent data loss and unnecessary hangs: - * - * - Adding a 'data' event handler will trigger the flow of data - * - * - Adding a 'readable' event handler when there is data waiting to be read - * will cause 'readable' to be emitted immediately. - * - * - Adding an 'endish' event handler ('end', 'finish', etc.) which has - * already passed will cause the event to be emitted immediately and all - * handlers removed. - * - * - Adding an 'error' event handler after an error has been emitted will - * cause the event to be re-emitted immediately with the error previously - * raised. - */ - on(ev, handler) { - const ret = super.on(ev, handler); - if (ev === 'data') { - this[DISCARDED] = false; - this[DATALISTENERS]++; - if (!this[PIPES].length && !this[FLOWING]) { - this[RESUME](); - } - } - else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) { - super.emit('readable'); - } - else if (isEndish(ev) && this[EMITTED_END]) { - super.emit(ev); - this.removeAllListeners(ev); - } - else if (ev === 'error' && this[EMITTED_ERROR]) { - const h = handler; - if (this[ASYNC]) - defer(() => h.call(this, this[EMITTED_ERROR])); - else - h.call(this, this[EMITTED_ERROR]); - } - return ret; - } - /** - * Alias for {@link Minipass#off} - */ - removeListener(ev, handler) { - return this.off(ev, handler); - } - /** - * Mostly identical to `EventEmitter.off` - * - * If a 'data' event handler is removed, and it was the last consumer - * (ie, there are no pipe destinations or other 'data' event listeners), - * then the flow of data will stop until there is another consumer or - * {@link Minipass#resume} is explicitly called. - */ - off(ev, handler) { - const ret = super.off(ev, handler); - // if we previously had listeners, and now we don't, and we don't - // have any pipes, then stop the flow, unless it's been explicitly - // put in a discarded flowing state via stream.resume(). - if (ev === 'data') { - this[DATALISTENERS] = this.listeners('data').length; - if (this[DATALISTENERS] === 0 && - !this[DISCARDED] && - !this[PIPES].length) { - this[FLOWING] = false; - } - } - return ret; - } - /** - * Mostly identical to `EventEmitter.removeAllListeners` - * - * If all 'data' event handlers are removed, and they were the last consumer - * (ie, there are no pipe destinations), then the flow of data will stop - * until there is another consumer or {@link Minipass#resume} is explicitly - * called. 
- */ - removeAllListeners(ev) { - const ret = super.removeAllListeners(ev); - if (ev === 'data' || ev === undefined) { - this[DATALISTENERS] = 0; - if (!this[DISCARDED] && !this[PIPES].length) { - this[FLOWING] = false; - } - } - return ret; - } - /** - * true if the 'end' event has been emitted - */ - get emittedEnd() { - return this[EMITTED_END]; - } - [MAYBE_EMIT_END]() { - if (!this[EMITTING_END] && - !this[EMITTED_END] && - !this[DESTROYED] && - this[BUFFER].length === 0 && - this[EOF]) { - this[EMITTING_END] = true; - this.emit('end'); - this.emit('prefinish'); - this.emit('finish'); - if (this[CLOSED]) - this.emit('close'); - this[EMITTING_END] = false; - } - } - /** - * Mostly identical to `EventEmitter.emit`, with the following - * behavior differences to prevent data loss and unnecessary hangs: - * - * If the stream has been destroyed, and the event is something other - * than 'close' or 'error', then `false` is returned and no handlers - * are called. - * - * If the event is 'end', and has already been emitted, then the event - * is ignored. If the stream is in a paused or non-flowing state, then - * the event will be deferred until data flow resumes. If the stream is - * async, then handlers will be called on the next tick rather than - * immediately. - * - * If the event is 'close', and 'end' has not yet been emitted, then - * the event will be deferred until after 'end' is emitted. - * - * If the event is 'error', and an AbortSignal was provided for the stream, - * and there are no listeners, then the event is ignored, matching the - * behavior of node core streams in the presense of an AbortSignal. - * - * If the event is 'finish' or 'prefinish', then all listeners will be - * removed after emitting the event, to prevent double-firing. - */ - emit(ev, ...args) { - const data = args[0]; - // error and close are only events allowed after calling destroy() - if (ev !== 'error' && - ev !== 'close' && - ev !== DESTROYED && - this[DESTROYED]) { - return false; - } - else if (ev === 'data') { - return !this[OBJECTMODE] && !data - ? false - : this[ASYNC] - ? (defer(() => this[EMITDATA](data)), true) - : this[EMITDATA](data); - } - else if (ev === 'end') { - return this[EMITEND](); - } - else if (ev === 'close') { - this[CLOSED] = true; - // don't emit close before 'end' and 'finish' - if (!this[EMITTED_END] && !this[DESTROYED]) - return false; - const ret = super.emit('close'); - this.removeAllListeners('close'); - return ret; - } - else if (ev === 'error') { - this[EMITTED_ERROR] = data; - super.emit(ERROR, data); - const ret = !this[SIGNAL] || this.listeners('error').length - ? super.emit('error', data) - : false; - this[MAYBE_EMIT_END](); - return ret; - } - else if (ev === 'resume') { - const ret = super.emit('resume'); - this[MAYBE_EMIT_END](); - return ret; - } - else if (ev === 'finish' || ev === 'prefinish') { - const ret = super.emit(ev); - this.removeAllListeners(ev); - return ret; - } - // Some other unknown event - const ret = super.emit(ev, ...args); - this[MAYBE_EMIT_END](); - return ret; - } - [EMITDATA](data) { - for (const p of this[PIPES]) { - if (p.dest.write(data) === false) - this.pause(); - } - const ret = this[DISCARDED] ? false : super.emit('data', data); - this[MAYBE_EMIT_END](); - return ret; - } - [EMITEND]() { - if (this[EMITTED_END]) - return false; - this[EMITTED_END] = true; - this.readable = false; - return this[ASYNC] - ? 
(defer(() => this[EMITEND2]()), true) - : this[EMITEND2](); - } - [EMITEND2]() { - if (this[DECODER]) { - const data = this[DECODER].end(); - if (data) { - for (const p of this[PIPES]) { - p.dest.write(data); - } - if (!this[DISCARDED]) - super.emit('data', data); - } - } - for (const p of this[PIPES]) { - p.end(); - } - const ret = super.emit('end'); - this.removeAllListeners('end'); - return ret; - } - /** - * Return a Promise that resolves to an array of all emitted data once - * the stream ends. - */ - async collect() { - const buf = Object.assign([], { - dataLength: 0, - }); - if (!this[OBJECTMODE]) - buf.dataLength = 0; - // set the promise first, in case an error is raised - // by triggering the flow here. - const p = this.promise(); - this.on('data', c => { - buf.push(c); - if (!this[OBJECTMODE]) - buf.dataLength += c.length; - }); - await p; - return buf; - } - /** - * Return a Promise that resolves to the concatenation of all emitted data - * once the stream ends. - * - * Not allowed on objectMode streams. - */ - async concat() { - if (this[OBJECTMODE]) { - throw new Error('cannot concat in objectMode'); - } - const buf = await this.collect(); - return (this[ENCODING] - ? buf.join('') - : Buffer.concat(buf, buf.dataLength)); - } - /** - * Return a void Promise that resolves once the stream ends. - */ - async promise() { - return new Promise((resolve, reject) => { - this.on(DESTROYED, () => reject(new Error('stream destroyed'))); - this.on('error', er => reject(er)); - this.on('end', () => resolve()); - }); - } - /** - * Asynchronous `for await of` iteration. - * - * This will continue emitting all chunks until the stream terminates. - */ - [Symbol.asyncIterator]() { - // set this up front, in case the consumer doesn't call next() - // right away. - this[DISCARDED] = false; - let stopped = false; - const stop = async () => { - this.pause(); - stopped = true; - return { value: undefined, done: true }; - }; - const next = () => { - if (stopped) - return stop(); - const res = this.read(); - if (res !== null) - return Promise.resolve({ done: false, value: res }); - if (this[EOF]) - return stop(); - let resolve; - let reject; - const onerr = (er) => { - this.off('data', ondata); - this.off('end', onend); - this.off(DESTROYED, ondestroy); - stop(); - reject(er); - }; - const ondata = (value) => { - this.off('error', onerr); - this.off('end', onend); - this.off(DESTROYED, ondestroy); - this.pause(); - resolve({ value, done: !!this[EOF] }); - }; - const onend = () => { - this.off('error', onerr); - this.off('data', ondata); - this.off(DESTROYED, ondestroy); - stop(); - resolve({ done: true, value: undefined }); - }; - const ondestroy = () => onerr(new Error('stream destroyed')); - return new Promise((res, rej) => { - reject = rej; - resolve = res; - this.once(DESTROYED, ondestroy); - this.once('error', onerr); - this.once('end', onend); - this.once('data', ondata); - }); - }; - return { - next, - throw: stop, - return: stop, - [Symbol.asyncIterator]() { - return this; - }, - }; - } - /** - * Synchronous `for of` iteration. - * - * The iteration will terminate when the internal buffer runs out, even - * if the stream has not yet terminated. - */ - [Symbol.iterator]() { - // set this up front, in case the consumer doesn't call next() - // right away. 
- this[DISCARDED] = false; - let stopped = false; - const stop = () => { - this.pause(); - this.off(ERROR, stop); - this.off(DESTROYED, stop); - this.off('end', stop); - stopped = true; - return { done: true, value: undefined }; - }; - const next = () => { - if (stopped) - return stop(); - const value = this.read(); - return value === null ? stop() : { done: false, value }; - }; - this.once('end', stop); - this.once(ERROR, stop); - this.once(DESTROYED, stop); - return { - next, - throw: stop, - return: stop, - [Symbol.iterator]() { - return this; - }, - }; - } - /** - * Destroy a stream, preventing it from being used for any further purpose. - * - * If the stream has a `close()` method, then it will be called on - * destruction. - * - * After destruction, any attempt to write data, read data, or emit most - * events will be ignored. - * - * If an error argument is provided, then it will be emitted in an - * 'error' event. - */ - destroy(er) { - if (this[DESTROYED]) { - if (er) - this.emit('error', er); - else - this.emit(DESTROYED); - return this; - } - this[DESTROYED] = true; - this[DISCARDED] = true; - // throw away all buffered data, it's never coming out - this[BUFFER].length = 0; - this[BUFFERLENGTH] = 0; - const wc = this; - if (typeof wc.close === 'function' && !this[CLOSED]) - wc.close(); - if (er) - this.emit('error', er); - // if no error to emit, still reject pending promises - else - this.emit(DESTROYED); - return this; - } - /** - * Alias for {@link isStream} - * - * Former export location, maintained for backwards compatibility. - * - * @deprecated - */ - static get isStream() { - return isStream; - } -} -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/mjs/package.json b/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/mjs/package.json deleted file mode 100644 index 3dbc1ca591c05..0000000000000 --- a/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/mjs/package.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "type": "module" -} diff --git a/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/package.json b/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/package.json deleted file mode 100644 index 6faaa247a5bc6..0000000000000 --- a/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/package.json +++ /dev/null @@ -1,82 +0,0 @@ -{ - "name": "minipass", - "version": "7.0.3", - "description": "minimal implementation of a PassThrough stream", - "main": "./dist/cjs/index.js", - "module": "./dist/mjs/index.js", - "types": "./dist/cjs/index.js", - "exports": { - ".": { - "import": { - "types": "./dist/mjs/index.d.ts", - "default": "./dist/mjs/index.js" - }, - "require": { - "types": "./dist/cjs/index.d.ts", - "default": "./dist/cjs/index.js" - } - }, - "./package.json": "./package.json" - }, - "files": [ - "dist" - ], - "scripts": { - "preversion": "npm test", - "postversion": "npm publish", - "prepublishOnly": "git push origin --follow-tags", - "preprepare": "rm -rf dist", - "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json && bash ./scripts/fixup.sh", - "pretest": "npm run prepare", - "presnap": "npm run prepare", - "test": "c8 tap", - "snap": "c8 tap", - "format": "prettier --write . 
--loglevel warn", - "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts" - }, - "tap": { - "coverage": false, - "node-arg": [ - "--enable-source-maps", - "--no-warnings", - "--loader", - "ts-node/esm" - ], - "ts": false - }, - "prettier": { - "semi": false, - "printWidth": 75, - "tabWidth": 2, - "useTabs": false, - "singleQuote": true, - "jsxSingleQuote": false, - "bracketSameLine": true, - "arrowParens": "avoid", - "endOfLine": "lf" - }, - "devDependencies": { - "@types/node": "^20.1.2", - "@types/tap": "^15.0.8", - "c8": "^7.13.0", - "prettier": "^2.6.2", - "tap": "^16.3.0", - "ts-node": "^10.9.1", - "typedoc": "^0.24.8", - "typescript": "^5.1.3", - "end-of-stream": "^1.4.0", - "node-abort-controller": "^3.1.1", - "sync-content": "^1.0.2", - "through2": "^2.0.3" - }, - "repository": "https://github.com/isaacs/minipass", - "keywords": [ - "passthrough", - "stream" - ], - "author": "Isaac Z. Schlueter (http://blog.izs.me/)", - "license": "ISC", - "engines": { - "node": ">=16 || 14 >=14.17" - } -} diff --git a/node_modules/tuf-js/node_modules/cacache/package.json b/node_modules/tuf-js/node_modules/cacache/package.json deleted file mode 100644 index ab58cb8b7c50f..0000000000000 --- a/node_modules/tuf-js/node_modules/cacache/package.json +++ /dev/null @@ -1,82 +0,0 @@ -{ - "name": "cacache", - "version": "17.1.4", - "cache-version": { - "content": "2", - "index": "5" - }, - "description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.", - "main": "lib/index.js", - "files": [ - "bin/", - "lib/" - ], - "scripts": { - "test": "tap", - "snap": "tap", - "coverage": "tap", - "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test", - "lint": "eslint \"**/*.js\"", - "npmclilint": "npmcli-lint", - "lintfix": "npm run lint -- --fix", - "postsnap": "npm run lintfix --", - "postlint": "template-oss-check", - "posttest": "npm run lint", - "template-oss-apply": "template-oss-apply --force" - }, - "repository": { - "type": "git", - "url": "https://github.com/npm/cacache.git" - }, - "keywords": [ - "cache", - "caching", - "content-addressable", - "sri", - "sri hash", - "subresource integrity", - "cache", - "storage", - "store", - "file store", - "filesystem", - "disk cache", - "disk storage" - ], - "license": "ISC", - "dependencies": { - "@npmcli/fs": "^3.1.0", - "fs-minipass": "^3.0.0", - "glob": "^10.2.2", - "lru-cache": "^7.7.1", - "minipass": "^7.0.3", - "minipass-collect": "^1.0.2", - "minipass-flush": "^1.0.5", - "minipass-pipeline": "^1.2.4", - "p-map": "^4.0.0", - "ssri": "^10.0.0", - "tar": "^6.1.11", - "unique-filename": "^3.0.0" - }, - "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.18.0", - "tap": "^16.0.0" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - }, - "templateOSS": { - "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "windowsCI": false, - "version": "4.18.0", - "publish": "true" - }, - "author": "GitHub Inc.", - "tap": { - "nyc-arg": [ - "--exclude", - "tap-snapshots/**" - ] - } -} diff --git a/node_modules/tuf-js/node_modules/lru-cache/LICENSE b/node_modules/tuf-js/node_modules/lru-cache/LICENSE deleted file mode 100644 index f785757cd63f8..0000000000000 --- a/node_modules/tuf-js/node_modules/lru-cache/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright (c) 2010-2023 Isaac Z. 
Schlueter and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/tuf-js/node_modules/lru-cache/index.js b/node_modules/tuf-js/node_modules/lru-cache/index.js deleted file mode 100644 index 48e99fe5e5a70..0000000000000 --- a/node_modules/tuf-js/node_modules/lru-cache/index.js +++ /dev/null @@ -1,1227 +0,0 @@ -const perf = - typeof performance === 'object' && - performance && - typeof performance.now === 'function' - ? performance - : Date - -const hasAbortController = typeof AbortController === 'function' - -// minimal backwards-compatibility polyfill -// this doesn't have nearly all the checks and whatnot that -// actual AbortController/Signal has, but it's enough for -// our purposes, and if used properly, behaves the same. -const AC = hasAbortController - ? AbortController - : class AbortController { - constructor() { - this.signal = new AS() - } - abort(reason = new Error('This operation was aborted')) { - this.signal.reason = this.signal.reason || reason - this.signal.aborted = true - this.signal.dispatchEvent({ - type: 'abort', - target: this.signal, - }) - } - } - -const hasAbortSignal = typeof AbortSignal === 'function' -// Some polyfills put this on the AC class, not global -const hasACAbortSignal = typeof AC.AbortSignal === 'function' -const AS = hasAbortSignal - ? AbortSignal - : hasACAbortSignal - ? AC.AbortController - : class AbortSignal { - constructor() { - this.reason = undefined - this.aborted = false - this._listeners = [] - } - dispatchEvent(e) { - if (e.type === 'abort') { - this.aborted = true - this.onabort(e) - this._listeners.forEach(f => f(e), this) - } - } - onabort() {} - addEventListener(ev, fn) { - if (ev === 'abort') { - this._listeners.push(fn) - } - } - removeEventListener(ev, fn) { - if (ev === 'abort') { - this._listeners = this._listeners.filter(f => f !== fn) - } - } - } - -const warned = new Set() -const deprecatedOption = (opt, instead) => { - const code = `LRU_CACHE_OPTION_${opt}` - if (shouldWarn(code)) { - warn(code, `${opt} option`, `options.${instead}`, LRUCache) - } -} -const deprecatedMethod = (method, instead) => { - const code = `LRU_CACHE_METHOD_${method}` - if (shouldWarn(code)) { - const { prototype } = LRUCache - const { get } = Object.getOwnPropertyDescriptor(prototype, method) - warn(code, `${method} method`, `cache.${instead}()`, get) - } -} -const deprecatedProperty = (field, instead) => { - const code = `LRU_CACHE_PROPERTY_${field}` - if (shouldWarn(code)) { - const { prototype } = LRUCache - const { get } = Object.getOwnPropertyDescriptor(prototype, field) - warn(code, `${field} property`, `cache.${instead}`, get) - } -} - -const emitWarning = (...a) => { - typeof process === 'object' && - process && - typeof process.emitWarning === 'function' - ? 
process.emitWarning(...a) - : console.error(...a) -} - -const shouldWarn = code => !warned.has(code) - -const warn = (code, what, instead, fn) => { - warned.add(code) - const msg = `The ${what} is deprecated. Please use ${instead} instead.` - emitWarning(msg, 'DeprecationWarning', code, fn) -} - -const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n) - -/* istanbul ignore next - This is a little bit ridiculous, tbh. - * The maximum array length is 2^32-1 or thereabouts on most JS impls. - * And well before that point, you're caching the entire world, I mean, - * that's ~32GB of just integers for the next/prev links, plus whatever - * else to hold that many keys and values. Just filling the memory with - * zeroes at init time is brutal when you get that big. - * But why not be complete? - * Maybe in the future, these limits will have expanded. */ -const getUintArray = max => - !isPosInt(max) - ? null - : max <= Math.pow(2, 8) - ? Uint8Array - : max <= Math.pow(2, 16) - ? Uint16Array - : max <= Math.pow(2, 32) - ? Uint32Array - : max <= Number.MAX_SAFE_INTEGER - ? ZeroArray - : null - -class ZeroArray extends Array { - constructor(size) { - super(size) - this.fill(0) - } -} - -class Stack { - constructor(max) { - if (max === 0) { - return [] - } - const UintArray = getUintArray(max) - this.heap = new UintArray(max) - this.length = 0 - } - push(n) { - this.heap[this.length++] = n - } - pop() { - return this.heap[--this.length] - } -} - -class LRUCache { - constructor(options = {}) { - const { - max = 0, - ttl, - ttlResolution = 1, - ttlAutopurge, - updateAgeOnGet, - updateAgeOnHas, - allowStale, - dispose, - disposeAfter, - noDisposeOnSet, - noUpdateTTL, - maxSize = 0, - maxEntrySize = 0, - sizeCalculation, - fetchMethod, - fetchContext, - noDeleteOnFetchRejection, - noDeleteOnStaleGet, - allowStaleOnFetchRejection, - allowStaleOnFetchAbort, - ignoreFetchAbort, - } = options - - // deprecated options, don't trigger a warning for getting them if - // the thing being passed in is another LRUCache we're copying. - const { length, maxAge, stale } = - options instanceof LRUCache ? {} : options - - if (max !== 0 && !isPosInt(max)) { - throw new TypeError('max option must be a nonnegative integer') - } - - const UintArray = max ? 
getUintArray(max) : Array - if (!UintArray) { - throw new Error('invalid max value: ' + max) - } - - this.max = max - this.maxSize = maxSize - this.maxEntrySize = maxEntrySize || this.maxSize - this.sizeCalculation = sizeCalculation || length - if (this.sizeCalculation) { - if (!this.maxSize && !this.maxEntrySize) { - throw new TypeError( - 'cannot set sizeCalculation without setting maxSize or maxEntrySize' - ) - } - if (typeof this.sizeCalculation !== 'function') { - throw new TypeError('sizeCalculation set to non-function') - } - } - - this.fetchMethod = fetchMethod || null - if (this.fetchMethod && typeof this.fetchMethod !== 'function') { - throw new TypeError( - 'fetchMethod must be a function if specified' - ) - } - - this.fetchContext = fetchContext - if (!this.fetchMethod && fetchContext !== undefined) { - throw new TypeError( - 'cannot set fetchContext without fetchMethod' - ) - } - - this.keyMap = new Map() - this.keyList = new Array(max).fill(null) - this.valList = new Array(max).fill(null) - this.next = new UintArray(max) - this.prev = new UintArray(max) - this.head = 0 - this.tail = 0 - this.free = new Stack(max) - this.initialFill = 1 - this.size = 0 - - if (typeof dispose === 'function') { - this.dispose = dispose - } - if (typeof disposeAfter === 'function') { - this.disposeAfter = disposeAfter - this.disposed = [] - } else { - this.disposeAfter = null - this.disposed = null - } - this.noDisposeOnSet = !!noDisposeOnSet - this.noUpdateTTL = !!noUpdateTTL - this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection - this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection - this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort - this.ignoreFetchAbort = !!ignoreFetchAbort - - // NB: maxEntrySize is set to maxSize if it's set - if (this.maxEntrySize !== 0) { - if (this.maxSize !== 0) { - if (!isPosInt(this.maxSize)) { - throw new TypeError( - 'maxSize must be a positive integer if specified' - ) - } - } - if (!isPosInt(this.maxEntrySize)) { - throw new TypeError( - 'maxEntrySize must be a positive integer if specified' - ) - } - this.initializeSizeTracking() - } - - this.allowStale = !!allowStale || !!stale - this.noDeleteOnStaleGet = !!noDeleteOnStaleGet - this.updateAgeOnGet = !!updateAgeOnGet - this.updateAgeOnHas = !!updateAgeOnHas - this.ttlResolution = - isPosInt(ttlResolution) || ttlResolution === 0 - ? ttlResolution - : 1 - this.ttlAutopurge = !!ttlAutopurge - this.ttl = ttl || maxAge || 0 - if (this.ttl) { - if (!isPosInt(this.ttl)) { - throw new TypeError( - 'ttl must be a positive integer if specified' - ) - } - this.initializeTTLTracking() - } - - // do not allow completely unbounded caches - if (this.max === 0 && this.ttl === 0 && this.maxSize === 0) { - throw new TypeError( - 'At least one of max, maxSize, or ttl is required' - ) - } - if (!this.ttlAutopurge && !this.max && !this.maxSize) { - const code = 'LRU_CACHE_UNBOUNDED' - if (shouldWarn(code)) { - warned.add(code) - const msg = - 'TTL caching without ttlAutopurge, max, or maxSize can ' + - 'result in unbounded memory consumption.' - emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache) - } - } - - if (stale) { - deprecatedOption('stale', 'allowStale') - } - if (maxAge) { - deprecatedOption('maxAge', 'ttl') - } - if (length) { - deprecatedOption('length', 'sizeCalculation') - } - } - - getRemainingTTL(key) { - return this.has(key, { updateAgeOnHas: false }) ? 
Infinity : 0 - } - - initializeTTLTracking() { - this.ttls = new ZeroArray(this.max) - this.starts = new ZeroArray(this.max) - - this.setItemTTL = (index, ttl, start = perf.now()) => { - this.starts[index] = ttl !== 0 ? start : 0 - this.ttls[index] = ttl - if (ttl !== 0 && this.ttlAutopurge) { - const t = setTimeout(() => { - if (this.isStale(index)) { - this.delete(this.keyList[index]) - } - }, ttl + 1) - /* istanbul ignore else - unref() not supported on all platforms */ - if (t.unref) { - t.unref() - } - } - } - - this.updateItemAge = index => { - this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0 - } - - this.statusTTL = (status, index) => { - if (status) { - status.ttl = this.ttls[index] - status.start = this.starts[index] - status.now = cachedNow || getNow() - status.remainingTTL = status.now + status.ttl - status.start - } - } - - // debounce calls to perf.now() to 1s so we're not hitting - // that costly call repeatedly. - let cachedNow = 0 - const getNow = () => { - const n = perf.now() - if (this.ttlResolution > 0) { - cachedNow = n - const t = setTimeout( - () => (cachedNow = 0), - this.ttlResolution - ) - /* istanbul ignore else - not available on all platforms */ - if (t.unref) { - t.unref() - } - } - return n - } - - this.getRemainingTTL = key => { - const index = this.keyMap.get(key) - if (index === undefined) { - return 0 - } - return this.ttls[index] === 0 || this.starts[index] === 0 - ? Infinity - : this.starts[index] + - this.ttls[index] - - (cachedNow || getNow()) - } - - this.isStale = index => { - return ( - this.ttls[index] !== 0 && - this.starts[index] !== 0 && - (cachedNow || getNow()) - this.starts[index] > - this.ttls[index] - ) - } - } - updateItemAge(_index) {} - statusTTL(_status, _index) {} - setItemTTL(_index, _ttl, _start) {} - isStale(_index) { - return false - } - - initializeSizeTracking() { - this.calculatedSize = 0 - this.sizes = new ZeroArray(this.max) - this.removeItemSize = index => { - this.calculatedSize -= this.sizes[index] - this.sizes[index] = 0 - } - this.requireSize = (k, v, size, sizeCalculation) => { - // provisionally accept background fetches. - // actual value size will be checked when they return. - if (this.isBackgroundFetch(v)) { - return 0 - } - if (!isPosInt(size)) { - if (sizeCalculation) { - if (typeof sizeCalculation !== 'function') { - throw new TypeError('sizeCalculation must be a function') - } - size = sizeCalculation(v, k) - if (!isPosInt(size)) { - throw new TypeError( - 'sizeCalculation return invalid (expect positive integer)' - ) - } - } else { - throw new TypeError( - 'invalid size value (must be positive integer). ' + - 'When maxSize or maxEntrySize is used, sizeCalculation or size ' + - 'must be set.' 
- ) - } - } - return size - } - this.addItemSize = (index, size, status) => { - this.sizes[index] = size - if (this.maxSize) { - const maxSize = this.maxSize - this.sizes[index] - while (this.calculatedSize > maxSize) { - this.evict(true) - } - } - this.calculatedSize += this.sizes[index] - if (status) { - status.entrySize = size - status.totalCalculatedSize = this.calculatedSize - } - } - } - removeItemSize(_index) {} - addItemSize(_index, _size) {} - requireSize(_k, _v, size, sizeCalculation) { - if (size || sizeCalculation) { - throw new TypeError( - 'cannot set size without setting maxSize or maxEntrySize on cache' - ) - } - } - - *indexes({ allowStale = this.allowStale } = {}) { - if (this.size) { - for (let i = this.tail; true; ) { - if (!this.isValidIndex(i)) { - break - } - if (allowStale || !this.isStale(i)) { - yield i - } - if (i === this.head) { - break - } else { - i = this.prev[i] - } - } - } - } - - *rindexes({ allowStale = this.allowStale } = {}) { - if (this.size) { - for (let i = this.head; true; ) { - if (!this.isValidIndex(i)) { - break - } - if (allowStale || !this.isStale(i)) { - yield i - } - if (i === this.tail) { - break - } else { - i = this.next[i] - } - } - } - } - - isValidIndex(index) { - return ( - index !== undefined && - this.keyMap.get(this.keyList[index]) === index - ) - } - - *entries() { - for (const i of this.indexes()) { - if ( - this.valList[i] !== undefined && - this.keyList[i] !== undefined && - !this.isBackgroundFetch(this.valList[i]) - ) { - yield [this.keyList[i], this.valList[i]] - } - } - } - *rentries() { - for (const i of this.rindexes()) { - if ( - this.valList[i] !== undefined && - this.keyList[i] !== undefined && - !this.isBackgroundFetch(this.valList[i]) - ) { - yield [this.keyList[i], this.valList[i]] - } - } - } - - *keys() { - for (const i of this.indexes()) { - if ( - this.keyList[i] !== undefined && - !this.isBackgroundFetch(this.valList[i]) - ) { - yield this.keyList[i] - } - } - } - *rkeys() { - for (const i of this.rindexes()) { - if ( - this.keyList[i] !== undefined && - !this.isBackgroundFetch(this.valList[i]) - ) { - yield this.keyList[i] - } - } - } - - *values() { - for (const i of this.indexes()) { - if ( - this.valList[i] !== undefined && - !this.isBackgroundFetch(this.valList[i]) - ) { - yield this.valList[i] - } - } - } - *rvalues() { - for (const i of this.rindexes()) { - if ( - this.valList[i] !== undefined && - !this.isBackgroundFetch(this.valList[i]) - ) { - yield this.valList[i] - } - } - } - - [Symbol.iterator]() { - return this.entries() - } - - find(fn, getOptions) { - for (const i of this.indexes()) { - const v = this.valList[i] - const value = this.isBackgroundFetch(v) - ? v.__staleWhileFetching - : v - if (value === undefined) continue - if (fn(value, this.keyList[i], this)) { - return this.get(this.keyList[i], getOptions) - } - } - } - - forEach(fn, thisp = this) { - for (const i of this.indexes()) { - const v = this.valList[i] - const value = this.isBackgroundFetch(v) - ? v.__staleWhileFetching - : v - if (value === undefined) continue - fn.call(thisp, value, this.keyList[i], this) - } - } - - rforEach(fn, thisp = this) { - for (const i of this.rindexes()) { - const v = this.valList[i] - const value = this.isBackgroundFetch(v) - ? 
v.__staleWhileFetching - : v - if (value === undefined) continue - fn.call(thisp, value, this.keyList[i], this) - } - } - - get prune() { - deprecatedMethod('prune', 'purgeStale') - return this.purgeStale - } - - purgeStale() { - let deleted = false - for (const i of this.rindexes({ allowStale: true })) { - if (this.isStale(i)) { - this.delete(this.keyList[i]) - deleted = true - } - } - return deleted - } - - dump() { - const arr = [] - for (const i of this.indexes({ allowStale: true })) { - const key = this.keyList[i] - const v = this.valList[i] - const value = this.isBackgroundFetch(v) - ? v.__staleWhileFetching - : v - if (value === undefined) continue - const entry = { value } - if (this.ttls) { - entry.ttl = this.ttls[i] - // always dump the start relative to a portable timestamp - // it's ok for this to be a bit slow, it's a rare operation. - const age = perf.now() - this.starts[i] - entry.start = Math.floor(Date.now() - age) - } - if (this.sizes) { - entry.size = this.sizes[i] - } - arr.unshift([key, entry]) - } - return arr - } - - load(arr) { - this.clear() - for (const [key, entry] of arr) { - if (entry.start) { - // entry.start is a portable timestamp, but we may be using - // node's performance.now(), so calculate the offset. - // it's ok for this to be a bit slow, it's a rare operation. - const age = Date.now() - entry.start - entry.start = perf.now() - age - } - this.set(key, entry.value, entry) - } - } - - dispose(_v, _k, _reason) {} - - set( - k, - v, - { - ttl = this.ttl, - start, - noDisposeOnSet = this.noDisposeOnSet, - size = 0, - sizeCalculation = this.sizeCalculation, - noUpdateTTL = this.noUpdateTTL, - status, - } = {} - ) { - size = this.requireSize(k, v, size, sizeCalculation) - // if the item doesn't fit, don't do anything - // NB: maxEntrySize set to maxSize by default - if (this.maxEntrySize && size > this.maxEntrySize) { - if (status) { - status.set = 'miss' - status.maxEntrySizeExceeded = true - } - // have to delete, in case a background fetch is there already. - // in non-async cases, this is a no-op - this.delete(k) - return this - } - let index = this.size === 0 ? undefined : this.keyMap.get(k) - if (index === undefined) { - // addition - index = this.newIndex() - this.keyList[index] = k - this.valList[index] = v - this.keyMap.set(k, index) - this.next[this.tail] = index - this.prev[index] = this.tail - this.tail = index - this.size++ - this.addItemSize(index, size, status) - if (status) { - status.set = 'add' - } - noUpdateTTL = false - } else { - // update - this.moveToTail(index) - const oldVal = this.valList[index] - if (v !== oldVal) { - if (this.isBackgroundFetch(oldVal)) { - oldVal.__abortController.abort(new Error('replaced')) - } else { - if (!noDisposeOnSet) { - this.dispose(oldVal, k, 'set') - if (this.disposeAfter) { - this.disposed.push([oldVal, k, 'set']) - } - } - } - this.removeItemSize(index) - this.valList[index] = v - this.addItemSize(index, size, status) - if (status) { - status.set = 'replace' - const oldValue = - oldVal && this.isBackgroundFetch(oldVal) - ? 
oldVal.__staleWhileFetching - : oldVal - if (oldValue !== undefined) status.oldValue = oldValue - } - } else if (status) { - status.set = 'update' - } - } - if (ttl !== 0 && this.ttl === 0 && !this.ttls) { - this.initializeTTLTracking() - } - if (!noUpdateTTL) { - this.setItemTTL(index, ttl, start) - } - this.statusTTL(status, index) - if (this.disposeAfter) { - while (this.disposed.length) { - this.disposeAfter(...this.disposed.shift()) - } - } - return this - } - - newIndex() { - if (this.size === 0) { - return this.tail - } - if (this.size === this.max && this.max !== 0) { - return this.evict(false) - } - if (this.free.length !== 0) { - return this.free.pop() - } - // initial fill, just keep writing down the list - return this.initialFill++ - } - - pop() { - if (this.size) { - const val = this.valList[this.head] - this.evict(true) - return val - } - } - - evict(free) { - const head = this.head - const k = this.keyList[head] - const v = this.valList[head] - if (this.isBackgroundFetch(v)) { - v.__abortController.abort(new Error('evicted')) - } else { - this.dispose(v, k, 'evict') - if (this.disposeAfter) { - this.disposed.push([v, k, 'evict']) - } - } - this.removeItemSize(head) - // if we aren't about to use the index, then null these out - if (free) { - this.keyList[head] = null - this.valList[head] = null - this.free.push(head) - } - this.head = this.next[head] - this.keyMap.delete(k) - this.size-- - return head - } - - has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) { - const index = this.keyMap.get(k) - if (index !== undefined) { - if (!this.isStale(index)) { - if (updateAgeOnHas) { - this.updateItemAge(index) - } - if (status) status.has = 'hit' - this.statusTTL(status, index) - return true - } else if (status) { - status.has = 'stale' - this.statusTTL(status, index) - } - } else if (status) { - status.has = 'miss' - } - return false - } - - // like get(), but without any LRU updating or TTL expiration - peek(k, { allowStale = this.allowStale } = {}) { - const index = this.keyMap.get(k) - if (index !== undefined && (allowStale || !this.isStale(index))) { - const v = this.valList[index] - // either stale and allowed, or forcing a refresh of non-stale value - return this.isBackgroundFetch(v) ? v.__staleWhileFetching : v - } - } - - backgroundFetch(k, index, options, context) { - const v = index === undefined ? 
undefined : this.valList[index] - if (this.isBackgroundFetch(v)) { - return v - } - const ac = new AC() - if (options.signal) { - options.signal.addEventListener('abort', () => - ac.abort(options.signal.reason) - ) - } - const fetchOpts = { - signal: ac.signal, - options, - context, - } - const cb = (v, updateCache = false) => { - const { aborted } = ac.signal - const ignoreAbort = options.ignoreFetchAbort && v !== undefined - if (options.status) { - if (aborted && !updateCache) { - options.status.fetchAborted = true - options.status.fetchError = ac.signal.reason - if (ignoreAbort) options.status.fetchAbortIgnored = true - } else { - options.status.fetchResolved = true - } - } - if (aborted && !ignoreAbort && !updateCache) { - return fetchFail(ac.signal.reason) - } - // either we didn't abort, and are still here, or we did, and ignored - if (this.valList[index] === p) { - if (v === undefined) { - if (p.__staleWhileFetching) { - this.valList[index] = p.__staleWhileFetching - } else { - this.delete(k) - } - } else { - if (options.status) options.status.fetchUpdated = true - this.set(k, v, fetchOpts.options) - } - } - return v - } - const eb = er => { - if (options.status) { - options.status.fetchRejected = true - options.status.fetchError = er - } - return fetchFail(er) - } - const fetchFail = er => { - const { aborted } = ac.signal - const allowStaleAborted = - aborted && options.allowStaleOnFetchAbort - const allowStale = - allowStaleAborted || options.allowStaleOnFetchRejection - const noDelete = allowStale || options.noDeleteOnFetchRejection - if (this.valList[index] === p) { - // if we allow stale on fetch rejections, then we need to ensure that - // the stale value is not removed from the cache when the fetch fails. - const del = !noDelete || p.__staleWhileFetching === undefined - if (del) { - this.delete(k) - } else if (!allowStaleAborted) { - // still replace the *promise* with the stale value, - // since we are done with the promise at this point. - // leave it untouched if we're still waiting for an - // aborted background fetch that hasn't yet returned. - this.valList[index] = p.__staleWhileFetching - } - } - if (allowStale) { - if (options.status && p.__staleWhileFetching !== undefined) { - options.status.returnedStale = true - } - return p.__staleWhileFetching - } else if (p.__returned === p) { - throw er - } - } - const pcall = (res, rej) => { - this.fetchMethod(k, v, fetchOpts).then(v => res(v), rej) - // ignored, we go until we finish, regardless. - // defer check until we are actually aborting, - // so fetchMethod can override. - ac.signal.addEventListener('abort', () => { - if ( - !options.ignoreFetchAbort || - options.allowStaleOnFetchAbort - ) { - res() - // when it eventually resolves, update the cache. - if (options.allowStaleOnFetchAbort) { - res = v => cb(v, true) - } - } - }) - } - if (options.status) options.status.fetchDispatched = true - const p = new Promise(pcall).then(cb, eb) - p.__abortController = ac - p.__staleWhileFetching = v - p.__returned = null - if (index === undefined) { - // internal, don't expose status. 
- this.set(k, p, { ...fetchOpts.options, status: undefined }) - index = this.keyMap.get(k) - } else { - this.valList[index] = p - } - return p - } - - isBackgroundFetch(p) { - return ( - p && - typeof p === 'object' && - typeof p.then === 'function' && - Object.prototype.hasOwnProperty.call( - p, - '__staleWhileFetching' - ) && - Object.prototype.hasOwnProperty.call(p, '__returned') && - (p.__returned === p || p.__returned === null) - ) - } - - // this takes the union of get() and set() opts, because it does both - async fetch( - k, - { - // get options - allowStale = this.allowStale, - updateAgeOnGet = this.updateAgeOnGet, - noDeleteOnStaleGet = this.noDeleteOnStaleGet, - // set options - ttl = this.ttl, - noDisposeOnSet = this.noDisposeOnSet, - size = 0, - sizeCalculation = this.sizeCalculation, - noUpdateTTL = this.noUpdateTTL, - // fetch exclusive options - noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, - allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, - ignoreFetchAbort = this.ignoreFetchAbort, - allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, - fetchContext = this.fetchContext, - forceRefresh = false, - status, - signal, - } = {} - ) { - if (!this.fetchMethod) { - if (status) status.fetch = 'get' - return this.get(k, { - allowStale, - updateAgeOnGet, - noDeleteOnStaleGet, - status, - }) - } - - const options = { - allowStale, - updateAgeOnGet, - noDeleteOnStaleGet, - ttl, - noDisposeOnSet, - size, - sizeCalculation, - noUpdateTTL, - noDeleteOnFetchRejection, - allowStaleOnFetchRejection, - allowStaleOnFetchAbort, - ignoreFetchAbort, - status, - signal, - } - - let index = this.keyMap.get(k) - if (index === undefined) { - if (status) status.fetch = 'miss' - const p = this.backgroundFetch(k, index, options, fetchContext) - return (p.__returned = p) - } else { - // in cache, maybe already fetching - const v = this.valList[index] - if (this.isBackgroundFetch(v)) { - const stale = - allowStale && v.__staleWhileFetching !== undefined - if (status) { - status.fetch = 'inflight' - if (stale) status.returnedStale = true - } - return stale ? v.__staleWhileFetching : (v.__returned = v) - } - - // if we force a refresh, that means do NOT serve the cached value, - // unless we are already in the process of refreshing the cache. - const isStale = this.isStale(index) - if (!forceRefresh && !isStale) { - if (status) status.fetch = 'hit' - this.moveToTail(index) - if (updateAgeOnGet) { - this.updateItemAge(index) - } - this.statusTTL(status, index) - return v - } - - // ok, it is stale or a forced refresh, and not already fetching. - // refresh the cache. - const p = this.backgroundFetch(k, index, options, fetchContext) - const hasStale = p.__staleWhileFetching !== undefined - const staleVal = hasStale && allowStale - if (status) { - status.fetch = hasStale && isStale ? 'stale' : 'refresh' - if (staleVal && isStale) status.returnedStale = true - } - return staleVal ? 
p.__staleWhileFetching : (p.__returned = p) - } - } - - get( - k, - { - allowStale = this.allowStale, - updateAgeOnGet = this.updateAgeOnGet, - noDeleteOnStaleGet = this.noDeleteOnStaleGet, - status, - } = {} - ) { - const index = this.keyMap.get(k) - if (index !== undefined) { - const value = this.valList[index] - const fetching = this.isBackgroundFetch(value) - this.statusTTL(status, index) - if (this.isStale(index)) { - if (status) status.get = 'stale' - // delete only if not an in-flight background fetch - if (!fetching) { - if (!noDeleteOnStaleGet) { - this.delete(k) - } - if (status) status.returnedStale = allowStale - return allowStale ? value : undefined - } else { - if (status) { - status.returnedStale = - allowStale && value.__staleWhileFetching !== undefined - } - return allowStale ? value.__staleWhileFetching : undefined - } - } else { - if (status) status.get = 'hit' - // if we're currently fetching it, we don't actually have it yet - // it's not stale, which means this isn't a staleWhileRefetching. - // If it's not stale, and fetching, AND has a __staleWhileFetching - // value, then that means the user fetched with {forceRefresh:true}, - // so it's safe to return that value. - if (fetching) { - return value.__staleWhileFetching - } - this.moveToTail(index) - if (updateAgeOnGet) { - this.updateItemAge(index) - } - return value - } - } else if (status) { - status.get = 'miss' - } - } - - connect(p, n) { - this.prev[n] = p - this.next[p] = n - } - - moveToTail(index) { - // if tail already, nothing to do - // if head, move head to next[index] - // else - // move next[prev[index]] to next[index] (head has no prev) - // move prev[next[index]] to prev[index] - // prev[index] = tail - // next[tail] = index - // tail = index - if (index !== this.tail) { - if (index === this.head) { - this.head = this.next[index] - } else { - this.connect(this.prev[index], this.next[index]) - } - this.connect(this.tail, index) - this.tail = index - } - } - - get del() { - deprecatedMethod('del', 'delete') - return this.delete - } - - delete(k) { - let deleted = false - if (this.size !== 0) { - const index = this.keyMap.get(k) - if (index !== undefined) { - deleted = true - if (this.size === 1) { - this.clear() - } else { - this.removeItemSize(index) - const v = this.valList[index] - if (this.isBackgroundFetch(v)) { - v.__abortController.abort(new Error('deleted')) - } else { - this.dispose(v, k, 'delete') - if (this.disposeAfter) { - this.disposed.push([v, k, 'delete']) - } - } - this.keyMap.delete(k) - this.keyList[index] = null - this.valList[index] = null - if (index === this.tail) { - this.tail = this.prev[index] - } else if (index === this.head) { - this.head = this.next[index] - } else { - this.next[this.prev[index]] = this.next[index] - this.prev[this.next[index]] = this.prev[index] - } - this.size-- - this.free.push(index) - } - } - } - if (this.disposed) { - while (this.disposed.length) { - this.disposeAfter(...this.disposed.shift()) - } - } - return deleted - } - - clear() { - for (const index of this.rindexes({ allowStale: true })) { - const v = this.valList[index] - if (this.isBackgroundFetch(v)) { - v.__abortController.abort(new Error('deleted')) - } else { - const k = this.keyList[index] - this.dispose(v, k, 'delete') - if (this.disposeAfter) { - this.disposed.push([v, k, 'delete']) - } - } - } - - this.keyMap.clear() - this.valList.fill(null) - this.keyList.fill(null) - if (this.ttls) { - this.ttls.fill(0) - this.starts.fill(0) - } - if (this.sizes) { - this.sizes.fill(0) - } - 
this.head = 0 - this.tail = 0 - this.initialFill = 1 - this.free.length = 0 - this.calculatedSize = 0 - this.size = 0 - if (this.disposed) { - while (this.disposed.length) { - this.disposeAfter(...this.disposed.shift()) - } - } - } - - get reset() { - deprecatedMethod('reset', 'clear') - return this.clear - } - - get length() { - deprecatedProperty('length', 'size') - return this.size - } - - static get AbortController() { - return AC - } - static get AbortSignal() { - return AS - } -} - -module.exports = LRUCache diff --git a/node_modules/tuf-js/node_modules/lru-cache/index.mjs b/node_modules/tuf-js/node_modules/lru-cache/index.mjs deleted file mode 100644 index 4a0b4813ec515..0000000000000 --- a/node_modules/tuf-js/node_modules/lru-cache/index.mjs +++ /dev/null @@ -1,1227 +0,0 @@ -const perf = - typeof performance === 'object' && - performance && - typeof performance.now === 'function' - ? performance - : Date - -const hasAbortController = typeof AbortController === 'function' - -// minimal backwards-compatibility polyfill -// this doesn't have nearly all the checks and whatnot that -// actual AbortController/Signal has, but it's enough for -// our purposes, and if used properly, behaves the same. -const AC = hasAbortController - ? AbortController - : class AbortController { - constructor() { - this.signal = new AS() - } - abort(reason = new Error('This operation was aborted')) { - this.signal.reason = this.signal.reason || reason - this.signal.aborted = true - this.signal.dispatchEvent({ - type: 'abort', - target: this.signal, - }) - } - } - -const hasAbortSignal = typeof AbortSignal === 'function' -// Some polyfills put this on the AC class, not global -const hasACAbortSignal = typeof AC.AbortSignal === 'function' -const AS = hasAbortSignal - ? AbortSignal - : hasACAbortSignal - ? AC.AbortController - : class AbortSignal { - constructor() { - this.reason = undefined - this.aborted = false - this._listeners = [] - } - dispatchEvent(e) { - if (e.type === 'abort') { - this.aborted = true - this.onabort(e) - this._listeners.forEach(f => f(e), this) - } - } - onabort() {} - addEventListener(ev, fn) { - if (ev === 'abort') { - this._listeners.push(fn) - } - } - removeEventListener(ev, fn) { - if (ev === 'abort') { - this._listeners = this._listeners.filter(f => f !== fn) - } - } - } - -const warned = new Set() -const deprecatedOption = (opt, instead) => { - const code = `LRU_CACHE_OPTION_${opt}` - if (shouldWarn(code)) { - warn(code, `${opt} option`, `options.${instead}`, LRUCache) - } -} -const deprecatedMethod = (method, instead) => { - const code = `LRU_CACHE_METHOD_${method}` - if (shouldWarn(code)) { - const { prototype } = LRUCache - const { get } = Object.getOwnPropertyDescriptor(prototype, method) - warn(code, `${method} method`, `cache.${instead}()`, get) - } -} -const deprecatedProperty = (field, instead) => { - const code = `LRU_CACHE_PROPERTY_${field}` - if (shouldWarn(code)) { - const { prototype } = LRUCache - const { get } = Object.getOwnPropertyDescriptor(prototype, field) - warn(code, `${field} property`, `cache.${instead}`, get) - } -} - -const emitWarning = (...a) => { - typeof process === 'object' && - process && - typeof process.emitWarning === 'function' - ? process.emitWarning(...a) - : console.error(...a) -} - -const shouldWarn = code => !warned.has(code) - -const warn = (code, what, instead, fn) => { - warned.add(code) - const msg = `The ${what} is deprecated. 
Please use ${instead} instead.` - emitWarning(msg, 'DeprecationWarning', code, fn) -} - -const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n) - -/* istanbul ignore next - This is a little bit ridiculous, tbh. - * The maximum array length is 2^32-1 or thereabouts on most JS impls. - * And well before that point, you're caching the entire world, I mean, - * that's ~32GB of just integers for the next/prev links, plus whatever - * else to hold that many keys and values. Just filling the memory with - * zeroes at init time is brutal when you get that big. - * But why not be complete? - * Maybe in the future, these limits will have expanded. */ -const getUintArray = max => - !isPosInt(max) - ? null - : max <= Math.pow(2, 8) - ? Uint8Array - : max <= Math.pow(2, 16) - ? Uint16Array - : max <= Math.pow(2, 32) - ? Uint32Array - : max <= Number.MAX_SAFE_INTEGER - ? ZeroArray - : null - -class ZeroArray extends Array { - constructor(size) { - super(size) - this.fill(0) - } -} - -class Stack { - constructor(max) { - if (max === 0) { - return [] - } - const UintArray = getUintArray(max) - this.heap = new UintArray(max) - this.length = 0 - } - push(n) { - this.heap[this.length++] = n - } - pop() { - return this.heap[--this.length] - } -} - -class LRUCache { - constructor(options = {}) { - const { - max = 0, - ttl, - ttlResolution = 1, - ttlAutopurge, - updateAgeOnGet, - updateAgeOnHas, - allowStale, - dispose, - disposeAfter, - noDisposeOnSet, - noUpdateTTL, - maxSize = 0, - maxEntrySize = 0, - sizeCalculation, - fetchMethod, - fetchContext, - noDeleteOnFetchRejection, - noDeleteOnStaleGet, - allowStaleOnFetchRejection, - allowStaleOnFetchAbort, - ignoreFetchAbort, - } = options - - // deprecated options, don't trigger a warning for getting them if - // the thing being passed in is another LRUCache we're copying. - const { length, maxAge, stale } = - options instanceof LRUCache ? {} : options - - if (max !== 0 && !isPosInt(max)) { - throw new TypeError('max option must be a nonnegative integer') - } - - const UintArray = max ? 
getUintArray(max) : Array - if (!UintArray) { - throw new Error('invalid max value: ' + max) - } - - this.max = max - this.maxSize = maxSize - this.maxEntrySize = maxEntrySize || this.maxSize - this.sizeCalculation = sizeCalculation || length - if (this.sizeCalculation) { - if (!this.maxSize && !this.maxEntrySize) { - throw new TypeError( - 'cannot set sizeCalculation without setting maxSize or maxEntrySize' - ) - } - if (typeof this.sizeCalculation !== 'function') { - throw new TypeError('sizeCalculation set to non-function') - } - } - - this.fetchMethod = fetchMethod || null - if (this.fetchMethod && typeof this.fetchMethod !== 'function') { - throw new TypeError( - 'fetchMethod must be a function if specified' - ) - } - - this.fetchContext = fetchContext - if (!this.fetchMethod && fetchContext !== undefined) { - throw new TypeError( - 'cannot set fetchContext without fetchMethod' - ) - } - - this.keyMap = new Map() - this.keyList = new Array(max).fill(null) - this.valList = new Array(max).fill(null) - this.next = new UintArray(max) - this.prev = new UintArray(max) - this.head = 0 - this.tail = 0 - this.free = new Stack(max) - this.initialFill = 1 - this.size = 0 - - if (typeof dispose === 'function') { - this.dispose = dispose - } - if (typeof disposeAfter === 'function') { - this.disposeAfter = disposeAfter - this.disposed = [] - } else { - this.disposeAfter = null - this.disposed = null - } - this.noDisposeOnSet = !!noDisposeOnSet - this.noUpdateTTL = !!noUpdateTTL - this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection - this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection - this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort - this.ignoreFetchAbort = !!ignoreFetchAbort - - // NB: maxEntrySize is set to maxSize if it's set - if (this.maxEntrySize !== 0) { - if (this.maxSize !== 0) { - if (!isPosInt(this.maxSize)) { - throw new TypeError( - 'maxSize must be a positive integer if specified' - ) - } - } - if (!isPosInt(this.maxEntrySize)) { - throw new TypeError( - 'maxEntrySize must be a positive integer if specified' - ) - } - this.initializeSizeTracking() - } - - this.allowStale = !!allowStale || !!stale - this.noDeleteOnStaleGet = !!noDeleteOnStaleGet - this.updateAgeOnGet = !!updateAgeOnGet - this.updateAgeOnHas = !!updateAgeOnHas - this.ttlResolution = - isPosInt(ttlResolution) || ttlResolution === 0 - ? ttlResolution - : 1 - this.ttlAutopurge = !!ttlAutopurge - this.ttl = ttl || maxAge || 0 - if (this.ttl) { - if (!isPosInt(this.ttl)) { - throw new TypeError( - 'ttl must be a positive integer if specified' - ) - } - this.initializeTTLTracking() - } - - // do not allow completely unbounded caches - if (this.max === 0 && this.ttl === 0 && this.maxSize === 0) { - throw new TypeError( - 'At least one of max, maxSize, or ttl is required' - ) - } - if (!this.ttlAutopurge && !this.max && !this.maxSize) { - const code = 'LRU_CACHE_UNBOUNDED' - if (shouldWarn(code)) { - warned.add(code) - const msg = - 'TTL caching without ttlAutopurge, max, or maxSize can ' + - 'result in unbounded memory consumption.' - emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache) - } - } - - if (stale) { - deprecatedOption('stale', 'allowStale') - } - if (maxAge) { - deprecatedOption('maxAge', 'ttl') - } - if (length) { - deprecatedOption('length', 'sizeCalculation') - } - } - - getRemainingTTL(key) { - return this.has(key, { updateAgeOnHas: false }) ? 
Infinity : 0 - } - - initializeTTLTracking() { - this.ttls = new ZeroArray(this.max) - this.starts = new ZeroArray(this.max) - - this.setItemTTL = (index, ttl, start = perf.now()) => { - this.starts[index] = ttl !== 0 ? start : 0 - this.ttls[index] = ttl - if (ttl !== 0 && this.ttlAutopurge) { - const t = setTimeout(() => { - if (this.isStale(index)) { - this.delete(this.keyList[index]) - } - }, ttl + 1) - /* istanbul ignore else - unref() not supported on all platforms */ - if (t.unref) { - t.unref() - } - } - } - - this.updateItemAge = index => { - this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0 - } - - this.statusTTL = (status, index) => { - if (status) { - status.ttl = this.ttls[index] - status.start = this.starts[index] - status.now = cachedNow || getNow() - status.remainingTTL = status.now + status.ttl - status.start - } - } - - // debounce calls to perf.now() to 1s so we're not hitting - // that costly call repeatedly. - let cachedNow = 0 - const getNow = () => { - const n = perf.now() - if (this.ttlResolution > 0) { - cachedNow = n - const t = setTimeout( - () => (cachedNow = 0), - this.ttlResolution - ) - /* istanbul ignore else - not available on all platforms */ - if (t.unref) { - t.unref() - } - } - return n - } - - this.getRemainingTTL = key => { - const index = this.keyMap.get(key) - if (index === undefined) { - return 0 - } - return this.ttls[index] === 0 || this.starts[index] === 0 - ? Infinity - : this.starts[index] + - this.ttls[index] - - (cachedNow || getNow()) - } - - this.isStale = index => { - return ( - this.ttls[index] !== 0 && - this.starts[index] !== 0 && - (cachedNow || getNow()) - this.starts[index] > - this.ttls[index] - ) - } - } - updateItemAge(_index) {} - statusTTL(_status, _index) {} - setItemTTL(_index, _ttl, _start) {} - isStale(_index) { - return false - } - - initializeSizeTracking() { - this.calculatedSize = 0 - this.sizes = new ZeroArray(this.max) - this.removeItemSize = index => { - this.calculatedSize -= this.sizes[index] - this.sizes[index] = 0 - } - this.requireSize = (k, v, size, sizeCalculation) => { - // provisionally accept background fetches. - // actual value size will be checked when they return. - if (this.isBackgroundFetch(v)) { - return 0 - } - if (!isPosInt(size)) { - if (sizeCalculation) { - if (typeof sizeCalculation !== 'function') { - throw new TypeError('sizeCalculation must be a function') - } - size = sizeCalculation(v, k) - if (!isPosInt(size)) { - throw new TypeError( - 'sizeCalculation return invalid (expect positive integer)' - ) - } - } else { - throw new TypeError( - 'invalid size value (must be positive integer). ' + - 'When maxSize or maxEntrySize is used, sizeCalculation or size ' + - 'must be set.' 
- ) - } - } - return size - } - this.addItemSize = (index, size, status) => { - this.sizes[index] = size - if (this.maxSize) { - const maxSize = this.maxSize - this.sizes[index] - while (this.calculatedSize > maxSize) { - this.evict(true) - } - } - this.calculatedSize += this.sizes[index] - if (status) { - status.entrySize = size - status.totalCalculatedSize = this.calculatedSize - } - } - } - removeItemSize(_index) {} - addItemSize(_index, _size) {} - requireSize(_k, _v, size, sizeCalculation) { - if (size || sizeCalculation) { - throw new TypeError( - 'cannot set size without setting maxSize or maxEntrySize on cache' - ) - } - } - - *indexes({ allowStale = this.allowStale } = {}) { - if (this.size) { - for (let i = this.tail; true; ) { - if (!this.isValidIndex(i)) { - break - } - if (allowStale || !this.isStale(i)) { - yield i - } - if (i === this.head) { - break - } else { - i = this.prev[i] - } - } - } - } - - *rindexes({ allowStale = this.allowStale } = {}) { - if (this.size) { - for (let i = this.head; true; ) { - if (!this.isValidIndex(i)) { - break - } - if (allowStale || !this.isStale(i)) { - yield i - } - if (i === this.tail) { - break - } else { - i = this.next[i] - } - } - } - } - - isValidIndex(index) { - return ( - index !== undefined && - this.keyMap.get(this.keyList[index]) === index - ) - } - - *entries() { - for (const i of this.indexes()) { - if ( - this.valList[i] !== undefined && - this.keyList[i] !== undefined && - !this.isBackgroundFetch(this.valList[i]) - ) { - yield [this.keyList[i], this.valList[i]] - } - } - } - *rentries() { - for (const i of this.rindexes()) { - if ( - this.valList[i] !== undefined && - this.keyList[i] !== undefined && - !this.isBackgroundFetch(this.valList[i]) - ) { - yield [this.keyList[i], this.valList[i]] - } - } - } - - *keys() { - for (const i of this.indexes()) { - if ( - this.keyList[i] !== undefined && - !this.isBackgroundFetch(this.valList[i]) - ) { - yield this.keyList[i] - } - } - } - *rkeys() { - for (const i of this.rindexes()) { - if ( - this.keyList[i] !== undefined && - !this.isBackgroundFetch(this.valList[i]) - ) { - yield this.keyList[i] - } - } - } - - *values() { - for (const i of this.indexes()) { - if ( - this.valList[i] !== undefined && - !this.isBackgroundFetch(this.valList[i]) - ) { - yield this.valList[i] - } - } - } - *rvalues() { - for (const i of this.rindexes()) { - if ( - this.valList[i] !== undefined && - !this.isBackgroundFetch(this.valList[i]) - ) { - yield this.valList[i] - } - } - } - - [Symbol.iterator]() { - return this.entries() - } - - find(fn, getOptions) { - for (const i of this.indexes()) { - const v = this.valList[i] - const value = this.isBackgroundFetch(v) - ? v.__staleWhileFetching - : v - if (value === undefined) continue - if (fn(value, this.keyList[i], this)) { - return this.get(this.keyList[i], getOptions) - } - } - } - - forEach(fn, thisp = this) { - for (const i of this.indexes()) { - const v = this.valList[i] - const value = this.isBackgroundFetch(v) - ? v.__staleWhileFetching - : v - if (value === undefined) continue - fn.call(thisp, value, this.keyList[i], this) - } - } - - rforEach(fn, thisp = this) { - for (const i of this.rindexes()) { - const v = this.valList[i] - const value = this.isBackgroundFetch(v) - ? 
v.__staleWhileFetching - : v - if (value === undefined) continue - fn.call(thisp, value, this.keyList[i], this) - } - } - - get prune() { - deprecatedMethod('prune', 'purgeStale') - return this.purgeStale - } - - purgeStale() { - let deleted = false - for (const i of this.rindexes({ allowStale: true })) { - if (this.isStale(i)) { - this.delete(this.keyList[i]) - deleted = true - } - } - return deleted - } - - dump() { - const arr = [] - for (const i of this.indexes({ allowStale: true })) { - const key = this.keyList[i] - const v = this.valList[i] - const value = this.isBackgroundFetch(v) - ? v.__staleWhileFetching - : v - if (value === undefined) continue - const entry = { value } - if (this.ttls) { - entry.ttl = this.ttls[i] - // always dump the start relative to a portable timestamp - // it's ok for this to be a bit slow, it's a rare operation. - const age = perf.now() - this.starts[i] - entry.start = Math.floor(Date.now() - age) - } - if (this.sizes) { - entry.size = this.sizes[i] - } - arr.unshift([key, entry]) - } - return arr - } - - load(arr) { - this.clear() - for (const [key, entry] of arr) { - if (entry.start) { - // entry.start is a portable timestamp, but we may be using - // node's performance.now(), so calculate the offset. - // it's ok for this to be a bit slow, it's a rare operation. - const age = Date.now() - entry.start - entry.start = perf.now() - age - } - this.set(key, entry.value, entry) - } - } - - dispose(_v, _k, _reason) {} - - set( - k, - v, - { - ttl = this.ttl, - start, - noDisposeOnSet = this.noDisposeOnSet, - size = 0, - sizeCalculation = this.sizeCalculation, - noUpdateTTL = this.noUpdateTTL, - status, - } = {} - ) { - size = this.requireSize(k, v, size, sizeCalculation) - // if the item doesn't fit, don't do anything - // NB: maxEntrySize set to maxSize by default - if (this.maxEntrySize && size > this.maxEntrySize) { - if (status) { - status.set = 'miss' - status.maxEntrySizeExceeded = true - } - // have to delete, in case a background fetch is there already. - // in non-async cases, this is a no-op - this.delete(k) - return this - } - let index = this.size === 0 ? undefined : this.keyMap.get(k) - if (index === undefined) { - // addition - index = this.newIndex() - this.keyList[index] = k - this.valList[index] = v - this.keyMap.set(k, index) - this.next[this.tail] = index - this.prev[index] = this.tail - this.tail = index - this.size++ - this.addItemSize(index, size, status) - if (status) { - status.set = 'add' - } - noUpdateTTL = false - } else { - // update - this.moveToTail(index) - const oldVal = this.valList[index] - if (v !== oldVal) { - if (this.isBackgroundFetch(oldVal)) { - oldVal.__abortController.abort(new Error('replaced')) - } else { - if (!noDisposeOnSet) { - this.dispose(oldVal, k, 'set') - if (this.disposeAfter) { - this.disposed.push([oldVal, k, 'set']) - } - } - } - this.removeItemSize(index) - this.valList[index] = v - this.addItemSize(index, size, status) - if (status) { - status.set = 'replace' - const oldValue = - oldVal && this.isBackgroundFetch(oldVal) - ? 
oldVal.__staleWhileFetching - : oldVal - if (oldValue !== undefined) status.oldValue = oldValue - } - } else if (status) { - status.set = 'update' - } - } - if (ttl !== 0 && this.ttl === 0 && !this.ttls) { - this.initializeTTLTracking() - } - if (!noUpdateTTL) { - this.setItemTTL(index, ttl, start) - } - this.statusTTL(status, index) - if (this.disposeAfter) { - while (this.disposed.length) { - this.disposeAfter(...this.disposed.shift()) - } - } - return this - } - - newIndex() { - if (this.size === 0) { - return this.tail - } - if (this.size === this.max && this.max !== 0) { - return this.evict(false) - } - if (this.free.length !== 0) { - return this.free.pop() - } - // initial fill, just keep writing down the list - return this.initialFill++ - } - - pop() { - if (this.size) { - const val = this.valList[this.head] - this.evict(true) - return val - } - } - - evict(free) { - const head = this.head - const k = this.keyList[head] - const v = this.valList[head] - if (this.isBackgroundFetch(v)) { - v.__abortController.abort(new Error('evicted')) - } else { - this.dispose(v, k, 'evict') - if (this.disposeAfter) { - this.disposed.push([v, k, 'evict']) - } - } - this.removeItemSize(head) - // if we aren't about to use the index, then null these out - if (free) { - this.keyList[head] = null - this.valList[head] = null - this.free.push(head) - } - this.head = this.next[head] - this.keyMap.delete(k) - this.size-- - return head - } - - has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) { - const index = this.keyMap.get(k) - if (index !== undefined) { - if (!this.isStale(index)) { - if (updateAgeOnHas) { - this.updateItemAge(index) - } - if (status) status.has = 'hit' - this.statusTTL(status, index) - return true - } else if (status) { - status.has = 'stale' - this.statusTTL(status, index) - } - } else if (status) { - status.has = 'miss' - } - return false - } - - // like get(), but without any LRU updating or TTL expiration - peek(k, { allowStale = this.allowStale } = {}) { - const index = this.keyMap.get(k) - if (index !== undefined && (allowStale || !this.isStale(index))) { - const v = this.valList[index] - // either stale and allowed, or forcing a refresh of non-stale value - return this.isBackgroundFetch(v) ? v.__staleWhileFetching : v - } - } - - backgroundFetch(k, index, options, context) { - const v = index === undefined ? 
undefined : this.valList[index] - if (this.isBackgroundFetch(v)) { - return v - } - const ac = new AC() - if (options.signal) { - options.signal.addEventListener('abort', () => - ac.abort(options.signal.reason) - ) - } - const fetchOpts = { - signal: ac.signal, - options, - context, - } - const cb = (v, updateCache = false) => { - const { aborted } = ac.signal - const ignoreAbort = options.ignoreFetchAbort && v !== undefined - if (options.status) { - if (aborted && !updateCache) { - options.status.fetchAborted = true - options.status.fetchError = ac.signal.reason - if (ignoreAbort) options.status.fetchAbortIgnored = true - } else { - options.status.fetchResolved = true - } - } - if (aborted && !ignoreAbort && !updateCache) { - return fetchFail(ac.signal.reason) - } - // either we didn't abort, and are still here, or we did, and ignored - if (this.valList[index] === p) { - if (v === undefined) { - if (p.__staleWhileFetching) { - this.valList[index] = p.__staleWhileFetching - } else { - this.delete(k) - } - } else { - if (options.status) options.status.fetchUpdated = true - this.set(k, v, fetchOpts.options) - } - } - return v - } - const eb = er => { - if (options.status) { - options.status.fetchRejected = true - options.status.fetchError = er - } - return fetchFail(er) - } - const fetchFail = er => { - const { aborted } = ac.signal - const allowStaleAborted = - aborted && options.allowStaleOnFetchAbort - const allowStale = - allowStaleAborted || options.allowStaleOnFetchRejection - const noDelete = allowStale || options.noDeleteOnFetchRejection - if (this.valList[index] === p) { - // if we allow stale on fetch rejections, then we need to ensure that - // the stale value is not removed from the cache when the fetch fails. - const del = !noDelete || p.__staleWhileFetching === undefined - if (del) { - this.delete(k) - } else if (!allowStaleAborted) { - // still replace the *promise* with the stale value, - // since we are done with the promise at this point. - // leave it untouched if we're still waiting for an - // aborted background fetch that hasn't yet returned. - this.valList[index] = p.__staleWhileFetching - } - } - if (allowStale) { - if (options.status && p.__staleWhileFetching !== undefined) { - options.status.returnedStale = true - } - return p.__staleWhileFetching - } else if (p.__returned === p) { - throw er - } - } - const pcall = (res, rej) => { - this.fetchMethod(k, v, fetchOpts).then(v => res(v), rej) - // ignored, we go until we finish, regardless. - // defer check until we are actually aborting, - // so fetchMethod can override. - ac.signal.addEventListener('abort', () => { - if ( - !options.ignoreFetchAbort || - options.allowStaleOnFetchAbort - ) { - res() - // when it eventually resolves, update the cache. - if (options.allowStaleOnFetchAbort) { - res = v => cb(v, true) - } - } - }) - } - if (options.status) options.status.fetchDispatched = true - const p = new Promise(pcall).then(cb, eb) - p.__abortController = ac - p.__staleWhileFetching = v - p.__returned = null - if (index === undefined) { - // internal, don't expose status. 
- this.set(k, p, { ...fetchOpts.options, status: undefined }) - index = this.keyMap.get(k) - } else { - this.valList[index] = p - } - return p - } - - isBackgroundFetch(p) { - return ( - p && - typeof p === 'object' && - typeof p.then === 'function' && - Object.prototype.hasOwnProperty.call( - p, - '__staleWhileFetching' - ) && - Object.prototype.hasOwnProperty.call(p, '__returned') && - (p.__returned === p || p.__returned === null) - ) - } - - // this takes the union of get() and set() opts, because it does both - async fetch( - k, - { - // get options - allowStale = this.allowStale, - updateAgeOnGet = this.updateAgeOnGet, - noDeleteOnStaleGet = this.noDeleteOnStaleGet, - // set options - ttl = this.ttl, - noDisposeOnSet = this.noDisposeOnSet, - size = 0, - sizeCalculation = this.sizeCalculation, - noUpdateTTL = this.noUpdateTTL, - // fetch exclusive options - noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, - allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, - ignoreFetchAbort = this.ignoreFetchAbort, - allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, - fetchContext = this.fetchContext, - forceRefresh = false, - status, - signal, - } = {} - ) { - if (!this.fetchMethod) { - if (status) status.fetch = 'get' - return this.get(k, { - allowStale, - updateAgeOnGet, - noDeleteOnStaleGet, - status, - }) - } - - const options = { - allowStale, - updateAgeOnGet, - noDeleteOnStaleGet, - ttl, - noDisposeOnSet, - size, - sizeCalculation, - noUpdateTTL, - noDeleteOnFetchRejection, - allowStaleOnFetchRejection, - allowStaleOnFetchAbort, - ignoreFetchAbort, - status, - signal, - } - - let index = this.keyMap.get(k) - if (index === undefined) { - if (status) status.fetch = 'miss' - const p = this.backgroundFetch(k, index, options, fetchContext) - return (p.__returned = p) - } else { - // in cache, maybe already fetching - const v = this.valList[index] - if (this.isBackgroundFetch(v)) { - const stale = - allowStale && v.__staleWhileFetching !== undefined - if (status) { - status.fetch = 'inflight' - if (stale) status.returnedStale = true - } - return stale ? v.__staleWhileFetching : (v.__returned = v) - } - - // if we force a refresh, that means do NOT serve the cached value, - // unless we are already in the process of refreshing the cache. - const isStale = this.isStale(index) - if (!forceRefresh && !isStale) { - if (status) status.fetch = 'hit' - this.moveToTail(index) - if (updateAgeOnGet) { - this.updateItemAge(index) - } - this.statusTTL(status, index) - return v - } - - // ok, it is stale or a forced refresh, and not already fetching. - // refresh the cache. - const p = this.backgroundFetch(k, index, options, fetchContext) - const hasStale = p.__staleWhileFetching !== undefined - const staleVal = hasStale && allowStale - if (status) { - status.fetch = hasStale && isStale ? 'stale' : 'refresh' - if (staleVal && isStale) status.returnedStale = true - } - return staleVal ? 
p.__staleWhileFetching : (p.__returned = p) - } - } - - get( - k, - { - allowStale = this.allowStale, - updateAgeOnGet = this.updateAgeOnGet, - noDeleteOnStaleGet = this.noDeleteOnStaleGet, - status, - } = {} - ) { - const index = this.keyMap.get(k) - if (index !== undefined) { - const value = this.valList[index] - const fetching = this.isBackgroundFetch(value) - this.statusTTL(status, index) - if (this.isStale(index)) { - if (status) status.get = 'stale' - // delete only if not an in-flight background fetch - if (!fetching) { - if (!noDeleteOnStaleGet) { - this.delete(k) - } - if (status) status.returnedStale = allowStale - return allowStale ? value : undefined - } else { - if (status) { - status.returnedStale = - allowStale && value.__staleWhileFetching !== undefined - } - return allowStale ? value.__staleWhileFetching : undefined - } - } else { - if (status) status.get = 'hit' - // if we're currently fetching it, we don't actually have it yet - // it's not stale, which means this isn't a staleWhileRefetching. - // If it's not stale, and fetching, AND has a __staleWhileFetching - // value, then that means the user fetched with {forceRefresh:true}, - // so it's safe to return that value. - if (fetching) { - return value.__staleWhileFetching - } - this.moveToTail(index) - if (updateAgeOnGet) { - this.updateItemAge(index) - } - return value - } - } else if (status) { - status.get = 'miss' - } - } - - connect(p, n) { - this.prev[n] = p - this.next[p] = n - } - - moveToTail(index) { - // if tail already, nothing to do - // if head, move head to next[index] - // else - // move next[prev[index]] to next[index] (head has no prev) - // move prev[next[index]] to prev[index] - // prev[index] = tail - // next[tail] = index - // tail = index - if (index !== this.tail) { - if (index === this.head) { - this.head = this.next[index] - } else { - this.connect(this.prev[index], this.next[index]) - } - this.connect(this.tail, index) - this.tail = index - } - } - - get del() { - deprecatedMethod('del', 'delete') - return this.delete - } - - delete(k) { - let deleted = false - if (this.size !== 0) { - const index = this.keyMap.get(k) - if (index !== undefined) { - deleted = true - if (this.size === 1) { - this.clear() - } else { - this.removeItemSize(index) - const v = this.valList[index] - if (this.isBackgroundFetch(v)) { - v.__abortController.abort(new Error('deleted')) - } else { - this.dispose(v, k, 'delete') - if (this.disposeAfter) { - this.disposed.push([v, k, 'delete']) - } - } - this.keyMap.delete(k) - this.keyList[index] = null - this.valList[index] = null - if (index === this.tail) { - this.tail = this.prev[index] - } else if (index === this.head) { - this.head = this.next[index] - } else { - this.next[this.prev[index]] = this.next[index] - this.prev[this.next[index]] = this.prev[index] - } - this.size-- - this.free.push(index) - } - } - } - if (this.disposed) { - while (this.disposed.length) { - this.disposeAfter(...this.disposed.shift()) - } - } - return deleted - } - - clear() { - for (const index of this.rindexes({ allowStale: true })) { - const v = this.valList[index] - if (this.isBackgroundFetch(v)) { - v.__abortController.abort(new Error('deleted')) - } else { - const k = this.keyList[index] - this.dispose(v, k, 'delete') - if (this.disposeAfter) { - this.disposed.push([v, k, 'delete']) - } - } - } - - this.keyMap.clear() - this.valList.fill(null) - this.keyList.fill(null) - if (this.ttls) { - this.ttls.fill(0) - this.starts.fill(0) - } - if (this.sizes) { - this.sizes.fill(0) - } - 
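Every removal path in the delete()/clear() code above funnels through the same two hooks: dispose() runs synchronously while the slot is being reclaimed, and disposeAfter() runs once the cache is back in a consistent state. A short sketch of how they fire, assuming the lru-cache v7 constructor options shown in the vendored source:

const LRUCache = require('lru-cache')

const cache = new LRUCache({
  max: 2,
  // synchronous, called mid-removal; reasons used above: 'evict', 'set', 'delete'
  dispose: (value, key, reason) => console.log('dispose', key, reason),
  // deferred until after the removal has fully completed
  disposeAfter: (value, key, reason) => console.log('disposeAfter', key, reason),
})

cache.set('a', 1)
cache.set('b', 2)
cache.set('c', 3) // cache full: evicts LRU entry -> dispose(1, 'a', 'evict')
cache.delete('b') // explicit removal       -> dispose(2, 'b', 'delete')
cache.clear()     // remaining entry         -> dispose(3, 'c', 'delete')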
this.head = 0 - this.tail = 0 - this.initialFill = 1 - this.free.length = 0 - this.calculatedSize = 0 - this.size = 0 - if (this.disposed) { - while (this.disposed.length) { - this.disposeAfter(...this.disposed.shift()) - } - } - } - - get reset() { - deprecatedMethod('reset', 'clear') - return this.clear - } - - get length() { - deprecatedProperty('length', 'size') - return this.size - } - - static get AbortController() { - return AC - } - static get AbortSignal() { - return AS - } -} - -export default LRUCache diff --git a/node_modules/tuf-js/node_modules/lru-cache/package.json b/node_modules/tuf-js/node_modules/lru-cache/package.json deleted file mode 100644 index 9684991727e7a..0000000000000 --- a/node_modules/tuf-js/node_modules/lru-cache/package.json +++ /dev/null @@ -1,96 +0,0 @@ -{ - "name": "lru-cache", - "description": "A cache object that deletes the least-recently-used items.", - "version": "7.18.3", - "author": "Isaac Z. Schlueter ", - "keywords": [ - "mru", - "lru", - "cache" - ], - "sideEffects": false, - "scripts": { - "build": "npm run prepare", - "pretest": "npm run prepare", - "presnap": "npm run prepare", - "prepare": "node ./scripts/transpile-to-esm.js", - "size": "size-limit", - "test": "tap", - "snap": "tap", - "preversion": "npm test", - "postversion": "npm publish", - "prepublishOnly": "git push origin --follow-tags", - "format": "prettier --write .", - "typedoc": "typedoc ./index.d.ts" - }, - "type": "commonjs", - "main": "./index.js", - "module": "./index.mjs", - "types": "./index.d.ts", - "exports": { - ".": { - "import": { - "types": "./index.d.ts", - "default": "./index.mjs" - }, - "require": { - "types": "./index.d.ts", - "default": "./index.js" - } - }, - "./package.json": "./package.json" - }, - "repository": "git://github.com/isaacs/node-lru-cache.git", - "devDependencies": { - "@size-limit/preset-small-lib": "^7.0.8", - "@types/node": "^17.0.31", - "@types/tap": "^15.0.6", - "benchmark": "^2.1.4", - "c8": "^7.11.2", - "clock-mock": "^1.0.6", - "eslint-config-prettier": "^8.5.0", - "prettier": "^2.6.2", - "size-limit": "^7.0.8", - "tap": "^16.3.4", - "ts-node": "^10.7.0", - "tslib": "^2.4.0", - "typedoc": "^0.23.24", - "typescript": "^4.6.4" - }, - "license": "ISC", - "files": [ - "index.js", - "index.mjs", - "index.d.ts" - ], - "engines": { - "node": ">=12" - }, - "prettier": { - "semi": false, - "printWidth": 70, - "tabWidth": 2, - "useTabs": false, - "singleQuote": true, - "jsxSingleQuote": false, - "bracketSameLine": true, - "arrowParens": "avoid", - "endOfLine": "lf" - }, - "tap": { - "nyc-arg": [ - "--include=index.js" - ], - "node-arg": [ - "--expose-gc", - "--require", - "ts-node/register" - ], - "ts": false - }, - "size-limit": [ - { - "path": "./index.js" - } - ] -} diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/LICENSE b/node_modules/tuf-js/node_modules/make-fetch-happen/LICENSE deleted file mode 100644 index 1808eb2844231..0000000000000 --- a/node_modules/tuf-js/node_modules/make-fetch-happen/LICENSE +++ /dev/null @@ -1,16 +0,0 @@ -ISC License - -Copyright 2017-2022 (c) npm, Inc. - -Permission to use, copy, modify, and/or distribute this software for -any purpose with or without fee is hereby granted, provided that the -above copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS -ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE -COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR -CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS -OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE -USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/agent.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/agent.js deleted file mode 100644 index dd68492ed7ea7..0000000000000 --- a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/agent.js +++ /dev/null @@ -1,214 +0,0 @@ -'use strict' -const LRU = require('lru-cache') -const url = require('url') -const isLambda = require('is-lambda') -const dns = require('./dns.js') - -const AGENT_CACHE = new LRU({ max: 50 }) -const HttpAgent = require('agentkeepalive') -const HttpsAgent = HttpAgent.HttpsAgent - -module.exports = getAgent - -const getAgentTimeout = timeout => - typeof timeout !== 'number' || !timeout ? 0 : timeout + 1 - -const getMaxSockets = maxSockets => maxSockets || 15 - -function getAgent (uri, opts) { - const parsedUri = new url.URL(typeof uri === 'string' ? uri : uri.url) - const isHttps = parsedUri.protocol === 'https:' - const pxuri = getProxyUri(parsedUri.href, opts) - - // If opts.timeout is zero, set the agentTimeout to zero as well. A timeout - // of zero disables the timeout behavior (OS limits still apply). Else, if - // opts.timeout is a non-zero value, set it to timeout + 1, to ensure that - // the node-fetch-npm timeout will always fire first, giving us more - // consistent errors. - const agentTimeout = getAgentTimeout(opts.timeout) - const agentMaxSockets = getMaxSockets(opts.maxSockets) - - const key = [ - `https:${isHttps}`, - pxuri - ? `proxy:${pxuri.protocol}//${pxuri.host}:${pxuri.port}` - : '>no-proxy<', - `local-address:${opts.localAddress || '>no-local-address<'}`, - `strict-ssl:${isHttps ? opts.rejectUnauthorized : '>no-strict-ssl<'}`, - `ca:${(isHttps && opts.ca) || '>no-ca<'}`, - `cert:${(isHttps && opts.cert) || '>no-cert<'}`, - `key:${(isHttps && opts.key) || '>no-key<'}`, - `timeout:${agentTimeout}`, - `maxSockets:${agentMaxSockets}`, - ].join(':') - - if (opts.agent != null) { // `agent: false` has special behavior! - return opts.agent - } - - // keep alive in AWS lambda makes no sense - const lambdaAgent = !isLambda ? null - : isHttps ? require('https').globalAgent - : require('http').globalAgent - - if (isLambda && !pxuri) { - return lambdaAgent - } - - if (AGENT_CACHE.peek(key)) { - return AGENT_CACHE.get(key) - } - - if (pxuri) { - const pxopts = isLambda ? { - ...opts, - agent: lambdaAgent, - } : opts - const proxy = getProxy(pxuri, pxopts, isHttps) - AGENT_CACHE.set(key, proxy) - return proxy - } - - const agent = isHttps ? 
new HttpsAgent({ - maxSockets: agentMaxSockets, - ca: opts.ca, - cert: opts.cert, - key: opts.key, - localAddress: opts.localAddress, - rejectUnauthorized: opts.rejectUnauthorized, - timeout: agentTimeout, - freeSocketTimeout: 15000, - lookup: dns.getLookup(opts.dns), - }) : new HttpAgent({ - maxSockets: agentMaxSockets, - localAddress: opts.localAddress, - timeout: agentTimeout, - freeSocketTimeout: 15000, - lookup: dns.getLookup(opts.dns), - }) - AGENT_CACHE.set(key, agent) - return agent -} - -function checkNoProxy (uri, opts) { - const host = new url.URL(uri).hostname.split('.').reverse() - let noproxy = (opts.noProxy || getProcessEnv('no_proxy')) - if (typeof noproxy === 'string') { - noproxy = noproxy.split(',').map(n => n.trim()) - } - - return noproxy && noproxy.some(no => { - const noParts = no.split('.').filter(x => x).reverse() - if (!noParts.length) { - return false - } - for (let i = 0; i < noParts.length; i++) { - if (host[i] !== noParts[i]) { - return false - } - } - return true - }) -} - -module.exports.getProcessEnv = getProcessEnv - -function getProcessEnv (env) { - if (!env) { - return - } - - let value - - if (Array.isArray(env)) { - for (const e of env) { - value = process.env[e] || - process.env[e.toUpperCase()] || - process.env[e.toLowerCase()] - if (typeof value !== 'undefined') { - break - } - } - } - - if (typeof env === 'string') { - value = process.env[env] || - process.env[env.toUpperCase()] || - process.env[env.toLowerCase()] - } - - return value -} - -module.exports.getProxyUri = getProxyUri -function getProxyUri (uri, opts) { - const protocol = new url.URL(uri).protocol - - const proxy = opts.proxy || - ( - protocol === 'https:' && - getProcessEnv('https_proxy') - ) || - ( - protocol === 'http:' && - getProcessEnv(['https_proxy', 'http_proxy', 'proxy']) - ) - if (!proxy) { - return null - } - - const parsedProxy = (typeof proxy === 'string') ? new url.URL(proxy) : proxy - - return !checkNoProxy(uri, opts) && parsedProxy -} - -const getAuth = u => - u.username && u.password ? decodeURIComponent(`${u.username}:${u.password}`) - : u.username ? 
decodeURIComponent(u.username) - : null - -const getPath = u => u.pathname + u.search + u.hash - -const HttpProxyAgent = require('http-proxy-agent') -const HttpsProxyAgent = require('https-proxy-agent') -const { SocksProxyAgent } = require('socks-proxy-agent') -module.exports.getProxy = getProxy -function getProxy (proxyUrl, opts, isHttps) { - // our current proxy agents do not support an overridden dns lookup method, so will not - // benefit from the dns cache - const popts = { - host: proxyUrl.hostname, - port: proxyUrl.port, - protocol: proxyUrl.protocol, - path: getPath(proxyUrl), - auth: getAuth(proxyUrl), - ca: opts.ca, - cert: opts.cert, - key: opts.key, - timeout: getAgentTimeout(opts.timeout), - localAddress: opts.localAddress, - maxSockets: getMaxSockets(opts.maxSockets), - rejectUnauthorized: opts.rejectUnauthorized, - } - - if (proxyUrl.protocol === 'http:' || proxyUrl.protocol === 'https:') { - if (!isHttps) { - return new HttpProxyAgent(popts) - } else { - return new HttpsProxyAgent(popts) - } - } else if (proxyUrl.protocol.startsWith('socks')) { - // socks-proxy-agent uses hostname not host - popts.hostname = popts.host - delete popts.host - return new SocksProxyAgent(popts) - } else { - throw Object.assign( - new Error(`unsupported proxy protocol: '${proxyUrl.protocol}'`), - { - code: 'EUNSUPPORTEDPROXY', - url: proxyUrl.href, - } - ) - } -} diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/entry.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/entry.js deleted file mode 100644 index 45141095074ec..0000000000000 --- a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/entry.js +++ /dev/null @@ -1,469 +0,0 @@ -const { Request, Response } = require('minipass-fetch') -const { Minipass } = require('minipass') -const MinipassFlush = require('minipass-flush') -const cacache = require('cacache') -const url = require('url') - -const CachingMinipassPipeline = require('../pipeline.js') -const CachePolicy = require('./policy.js') -const cacheKey = require('./key.js') -const remote = require('../remote.js') - -const hasOwnProperty = (obj, prop) => Object.prototype.hasOwnProperty.call(obj, prop) - -// allow list for request headers that will be written to the cache index -// note: we will also store any request headers -// that are named in a response's vary header -const KEEP_REQUEST_HEADERS = [ - 'accept-charset', - 'accept-encoding', - 'accept-language', - 'accept', - 'cache-control', -] - -// allow list for response headers that will be written to the cache index -// note: we must not store the real response's age header, or when we load -// a cache policy based on the metadata it will think the cached response -// is always stale -const KEEP_RESPONSE_HEADERS = [ - 'cache-control', - 'content-encoding', - 'content-language', - 'content-type', - 'date', - 'etag', - 'expires', - 'last-modified', - 'link', - 'location', - 'pragma', - 'vary', -] - -// return an object containing all metadata to be written to the index -const getMetadata = (request, response, options) => { - const metadata = { - time: Date.now(), - url: request.url, - reqHeaders: {}, - resHeaders: {}, - - // options on which we must match the request and vary the response - options: { - compress: options.compress != null ? 
options.compress : request.compress, - }, - } - - // only save the status if it's not a 200 or 304 - if (response.status !== 200 && response.status !== 304) { - metadata.status = response.status - } - - for (const name of KEEP_REQUEST_HEADERS) { - if (request.headers.has(name)) { - metadata.reqHeaders[name] = request.headers.get(name) - } - } - - // if the request's host header differs from the host in the url - // we need to keep it, otherwise it's just noise and we ignore it - const host = request.headers.get('host') - const parsedUrl = new url.URL(request.url) - if (host && parsedUrl.host !== host) { - metadata.reqHeaders.host = host - } - - // if the response has a vary header, make sure - // we store the relevant request headers too - if (response.headers.has('vary')) { - const vary = response.headers.get('vary') - // a vary of "*" means every header causes a different response. - // in that scenario, we do not include any additional headers - // as the freshness check will always fail anyway and we don't - // want to bloat the cache indexes - if (vary !== '*') { - // copy any other request headers that will vary the response - const varyHeaders = vary.trim().toLowerCase().split(/\s*,\s*/) - for (const name of varyHeaders) { - if (request.headers.has(name)) { - metadata.reqHeaders[name] = request.headers.get(name) - } - } - } - } - - for (const name of KEEP_RESPONSE_HEADERS) { - if (response.headers.has(name)) { - metadata.resHeaders[name] = response.headers.get(name) - } - } - - for (const name of options.cacheAdditionalHeaders) { - if (response.headers.has(name)) { - metadata.resHeaders[name] = response.headers.get(name) - } - } - - return metadata -} - -// symbols used to hide objects that may be lazily evaluated in a getter -const _request = Symbol('request') -const _response = Symbol('response') -const _policy = Symbol('policy') - -class CacheEntry { - constructor ({ entry, request, response, options }) { - if (entry) { - this.key = entry.key - this.entry = entry - // previous versions of this module didn't write an explicit timestamp in - // the metadata, so fall back to the entry's timestamp. 
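Beyond its fixed allow-list, the getMetadata() logic above persists a request header into the cache index only when the response's Vary header names it, and stores nothing extra for `Vary: *`, since such an entry can never be matched fresh. The same rule restated in isolation, as an illustrative helper rather than part of the vendored module's API:

// Given fetch-style Request/Response objects, return the request headers
// that must be stored so the cached response can be matched later.
const varyHeadersToKeep = (request, response) => {
  const vary = response.headers.get('vary')
  // '*' means every header varies the response; storing request headers
  // is pointless because the freshness check can never succeed
  if (!vary || vary === '*') {
    return {}
  }
  const kept = {}
  for (const name of vary.trim().toLowerCase().split(/\s*,\s*/)) {
    if (request.headers.has(name)) {
      kept[name] = request.headers.get(name)
    }
  }
  return kept
}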
we can't use the - // entry timestamp to determine staleness because cacache will update it - // when it verifies its data - this.entry.metadata.time = this.entry.metadata.time || this.entry.time - } else { - this.key = cacheKey(request) - } - - this.options = options - - // these properties are behind getters that lazily evaluate - this[_request] = request - this[_response] = response - this[_policy] = null - } - - // returns a CacheEntry instance that satisfies the given request - // or undefined if no existing entry satisfies - static async find (request, options) { - try { - // compacts the index and returns an array of unique entries - var matches = await cacache.index.compact(options.cachePath, cacheKey(request), (A, B) => { - const entryA = new CacheEntry({ entry: A, options }) - const entryB = new CacheEntry({ entry: B, options }) - return entryA.policy.satisfies(entryB.request) - }, { - validateEntry: (entry) => { - // clean out entries with a buggy content-encoding value - if (entry.metadata && - entry.metadata.resHeaders && - entry.metadata.resHeaders['content-encoding'] === null) { - return false - } - - // if an integrity is null, it needs to have a status specified - if (entry.integrity === null) { - return !!(entry.metadata && entry.metadata.status) - } - - return true - }, - }) - } catch (err) { - // if the compact request fails, ignore the error and return - return - } - - // a cache mode of 'reload' means to behave as though we have no cache - // on the way to the network. return undefined to allow cacheFetch to - // create a brand new request no matter what. - if (options.cache === 'reload') { - return - } - - // find the specific entry that satisfies the request - let match - for (const entry of matches) { - const _entry = new CacheEntry({ - entry, - options, - }) - - if (_entry.policy.satisfies(request)) { - match = _entry - break - } - } - - return match - } - - // if the user made a PUT/POST/PATCH then we invalidate our - // cache for the same url by deleting the index entirely - static async invalidate (request, options) { - const key = cacheKey(request) - try { - await cacache.rm.entry(options.cachePath, key, { removeFully: true }) - } catch (err) { - // ignore errors - } - } - - get request () { - if (!this[_request]) { - this[_request] = new Request(this.entry.metadata.url, { - method: 'GET', - headers: this.entry.metadata.reqHeaders, - ...this.entry.metadata.options, - }) - } - - return this[_request] - } - - get response () { - if (!this[_response]) { - this[_response] = new Response(null, { - url: this.entry.metadata.url, - counter: this.options.counter, - status: this.entry.metadata.status || 200, - headers: { - ...this.entry.metadata.resHeaders, - 'content-length': this.entry.size, - }, - }) - } - - return this[_response] - } - - get policy () { - if (!this[_policy]) { - this[_policy] = new CachePolicy({ - entry: this.entry, - request: this.request, - response: this.response, - options: this.options, - }) - } - - return this[_policy] - } - - // wraps the response in a pipeline that stores the data - // in the cache while the user consumes it - async store (status) { - // if we got a status other than 200, 301, or 308, - // or the CachePolicy forbid storage, append the - // cache status header and return it untouched - if ( - this.request.method !== 'GET' || - ![200, 301, 308].includes(this.response.status) || - !this.policy.storable() - ) { - this.response.headers.set('x-local-cache-status', 'skip') - return this.response - } - - const size = 
this.response.headers.get('content-length')
-    const cacheOpts = {
-      algorithms: this.options.algorithms,
-      metadata: getMetadata(this.request, this.response, this.options),
-      size,
-      integrity: this.options.integrity,
-      integrityEmitter: this.response.body.hasIntegrityEmitter && this.response.body,
-    }
-
-    let body = null
-    // we only set a body if the status is a 200, redirects are
-    // stored as metadata only
-    if (this.response.status === 200) {
-      let cacheWriteResolve, cacheWriteReject
-      const cacheWritePromise = new Promise((resolve, reject) => {
-        cacheWriteResolve = resolve
-        cacheWriteReject = reject
-      })
-
-      body = new CachingMinipassPipeline({ events: ['integrity', 'size'] }, new MinipassFlush({
-        flush () {
-          return cacheWritePromise
-        },
-      }))
-      // this is always true since if we aren't reusing the one from the remote fetch, we
-      // are using the one from cacache
-      body.hasIntegrityEmitter = true
-
-      const onResume = () => {
-        const tee = new Minipass()
-        const cacheStream = cacache.put.stream(this.options.cachePath, this.key, cacheOpts)
-        // re-emit the integrity and size events on our new response body so they can be reused
-        cacheStream.on('integrity', i => body.emit('integrity', i))
-        cacheStream.on('size', s => body.emit('size', s))
-        // stick a flag on here so downstream users will know if they can expect integrity events
-        tee.pipe(cacheStream)
-        // TODO if the cache write fails, log a warning but return the response anyway
-        // eslint-disable-next-line promise/catch-or-return
-        cacheStream.promise().then(cacheWriteResolve, cacheWriteReject)
-        body.unshift(tee)
-        body.unshift(this.response.body)
-      }
-
-      body.once('resume', onResume)
-      body.once('end', () => body.removeListener('resume', onResume))
-    } else {
-      await cacache.index.insert(this.options.cachePath, this.key, null, cacheOpts)
-    }
-
-    // note: we do not set the x-local-cache-hash header because we do not know
-    // the hash value until after the write to the cache completes, which doesn't
-    // happen until after the response has been sent and it's too late to write
-    // the header anyway
-    this.response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath))
-    this.response.headers.set('x-local-cache-key', encodeURIComponent(this.key))
-    this.response.headers.set('x-local-cache-mode', 'stream')
-    this.response.headers.set('x-local-cache-status', status)
-    this.response.headers.set('x-local-cache-time', new Date().toISOString())
-    const newResponse = new Response(body, {
-      url: this.response.url,
-      status: this.response.status,
-      headers: this.response.headers,
-      counter: this.options.counter,
-    })
-    return newResponse
-  }
-
-  // use the cached data to create a response and return it
-  async respond (method, options, status) {
-    let response
-    if (method === 'HEAD' || [301, 308].includes(this.response.status)) {
-      // if the request is a HEAD, or the response is a redirect,
-      // then the metadata in the entry already includes everything
-      // we need to build a response
-      response = this.response
-    } else {
-      // we're responding with a full cached response, so create a body
-      // that reads from cacache and attach it to a new Response
-      const body = new Minipass()
-      const headers = { ...this.policy.responseHeaders() }
-
-      const onResume = () => {
-        const cacheStream = cacache.get.stream.byDigest(
-          this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize }
-        )
-        cacheStream.on('error', async (err) => {
-          cacheStream.pause()
-          if (err.code === 'EINTEGRITY') {
-            await cacache.rm.content(
-              this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize }
-            )
-          }
-          if (err.code === 'ENOENT' || err.code === 'EINTEGRITY') {
-            await CacheEntry.invalidate(this.request, this.options)
-          }
-          body.emit('error', err)
-          cacheStream.resume()
-        })
-        // emit the integrity and size events based on our metadata so we're consistent
-        body.emit('integrity', this.entry.integrity)
-        body.emit('size', Number(headers['content-length']))
-        cacheStream.pipe(body)
-      }
-
-      body.once('resume', onResume)
-      body.once('end', () => body.removeListener('resume', onResume))
-      response = new Response(body, {
-        url: this.entry.metadata.url,
-        counter: options.counter,
-        status: 200,
-        headers,
-      })
-    }
-
-    response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath))
-    response.headers.set('x-local-cache-hash', encodeURIComponent(this.entry.integrity))
-    response.headers.set('x-local-cache-key', encodeURIComponent(this.key))
-    response.headers.set('x-local-cache-mode', 'stream')
-    response.headers.set('x-local-cache-status', status)
-    response.headers.set('x-local-cache-time', new Date(this.entry.metadata.time).toUTCString())
-    return response
-  }
-
-  // use the provided request along with this cache entry to
-  // revalidate the stored response. returns a response, either
-  // from the cache or from the update
-  async revalidate (request, options) {
-    const revalidateRequest = new Request(request, {
-      headers: this.policy.revalidationHeaders(request),
-    })
-
-    try {
-      // NOTE: be sure to remove the headers property from the
-      // user supplied options, since we have already defined
-      // them on the new request object. if they're still in the
-      // options then those will overwrite the ones from the policy
-      var response = await remote(revalidateRequest, {
-        ...options,
-        headers: undefined,
-      })
-    } catch (err) {
-      // if the network fetch fails, return the stale
-      // cached response unless it has a cache-control
-      // of 'must-revalidate'
-      if (!this.policy.mustRevalidate) {
-        return this.respond(request.method, options, 'stale')
-      }
-
-      throw err
-    }
-
-    if (this.policy.revalidated(revalidateRequest, response)) {
-      // we got a 304, write a new index to the cache and respond from cache
-      const metadata = getMetadata(request, response, options)
-      // 304 responses do not include headers that are specific to the response data
-      // since they do not include a body, so we copy values for headers that were
-      // in the old cache entry to the new one, if the new metadata does not already
-      // include that header
-      for (const name of KEEP_RESPONSE_HEADERS) {
-        if (
-          !hasOwnProperty(metadata.resHeaders, name) &&
-          hasOwnProperty(this.entry.metadata.resHeaders, name)
-        ) {
-          metadata.resHeaders[name] = this.entry.metadata.resHeaders[name]
-        }
-      }
-
-      for (const name of options.cacheAdditionalHeaders) {
-        const inMeta = hasOwnProperty(metadata.resHeaders, name)
-        const inEntry = hasOwnProperty(this.entry.metadata.resHeaders, name)
-        const inPolicy = hasOwnProperty(this.policy.response.headers, name)
-
-        // if the header is in the existing entry, but it is not in the metadata
-        // then we need to write it to the metadata as this will refresh the on-disk cache
-        if (!inMeta && inEntry) {
-          metadata.resHeaders[name] = this.entry.metadata.resHeaders[name]
-        }
-        // if the header is in the metadata, but not in the policy, then we need to set
-        // it in the policy so that it's included in the immediate response. future
-        // responses will load a new cache entry, so we don't need to change that
-        if (!inPolicy && inMeta) {
-          this.policy.response.headers[name] = metadata.resHeaders[name]
-        }
-      }
-
-      try {
-        await cacache.index.insert(options.cachePath, this.key, this.entry.integrity, {
-          size: this.entry.size,
-          metadata,
-        })
-      } catch (err) {
-        // if updating the cache index fails, we ignore it and
-        // respond anyway
-      }
-      return this.respond(request.method, options, 'revalidated')
-    }
-
-    // if we got a modified response, create a new entry based on it
-    const newEntry = new CacheEntry({
-      request,
-      response,
-      options,
-    })
-
-    // respond with the new entry while writing it to the cache
-    return newEntry.store('updated')
-  }
-}
-
-module.exports = CacheEntry
diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/errors.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/errors.js
deleted file mode 100644
index 67a66573bebe6..0000000000000
--- a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/errors.js
+++ /dev/null
@@ -1,11 +0,0 @@
-class NotCachedError extends Error {
-  constructor (url) {
-    /* eslint-disable-next-line max-len */
-    super(`request to ${url} failed: cache mode is 'only-if-cached' but no cached response is available.`)
-    this.code = 'ENOTCACHED'
-  }
-}
-
-module.exports = {
-  NotCachedError,
-}
diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/index.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/index.js
deleted file mode 100644
index 0de49d23fb933..0000000000000
--- a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/index.js
+++ /dev/null
@@ -1,49 +0,0 @@
-const { NotCachedError } = require('./errors.js')
-const CacheEntry = require('./entry.js')
-const remote = require('../remote.js')
-
-// do whatever is necessary to get a Response and return it
-const cacheFetch = async (request, options) => {
-  // try to find a cached entry that satisfies this request
-  const entry = await CacheEntry.find(request, options)
-  if (!entry) {
-    // no cached result, if the cache mode is 'only-if-cached' that's a failure
-    if (options.cache === 'only-if-cached') {
-      throw new NotCachedError(request.url)
-    }
-
-    // otherwise, we make a request, store it and return it
-    const response = await remote(request, options)
-    const newEntry = new CacheEntry({ request, response, options })
-    return newEntry.store('miss')
-  }
-
-  // we have a cached response that satisfies this request, however if the cache
-  // mode is 'no-cache' then we send the revalidation request no matter what
-  if (options.cache === 'no-cache') {
-    return entry.revalidate(request, options)
-  }
-
-  // if the cached entry is not stale, or if the cache mode is 'force-cache' or
-  // 'only-if-cached' we can respond with the cached entry. set the status
-  // based on the result of needsRevalidation and respond
-  const _needsRevalidation = entry.policy.needsRevalidation(request)
-  if (options.cache === 'force-cache' ||
-      options.cache === 'only-if-cached' ||
-      !_needsRevalidation) {
-    return entry.respond(request.method, options, _needsRevalidation ? 'stale' : 'hit')
-  }
-
-  // if we got here, the cache entry is stale so revalidate it
-  return entry.revalidate(request, options)
-}
-
-cacheFetch.invalidate = async (request, options) => {
-  if (!options.cachePath) {
-    return
-  }
-
-  return CacheEntry.invalidate(request, options)
-}
-
-module.exports = cacheFetch
diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/key.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/key.js
deleted file mode 100644
index f7684d562b7fa..0000000000000
--- a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/key.js
+++ /dev/null
@@ -1,17 +0,0 @@
-const { URL, format } = require('url')
-
-// options passed to url.format() when generating a key
-const formatOptions = {
-  auth: false,
-  fragment: false,
-  search: true,
-  unicode: false,
-}
-
-// returns a string to be used as the cache key for the Request
-const cacheKey = (request) => {
-  const parsed = new URL(request.url)
-  return `make-fetch-happen:request-cache:${format(parsed, formatOptions)}`
-}
-
-module.exports = cacheKey
diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/policy.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/policy.js
deleted file mode 100644
index ada3c8600dae9..0000000000000
--- a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/policy.js
+++ /dev/null
@@ -1,161 +0,0 @@
-const CacheSemantics = require('http-cache-semantics')
-const Negotiator = require('negotiator')
-const ssri = require('ssri')
-
-// options passed to http-cache-semantics constructor
-const policyOptions = {
-  shared: false,
-  ignoreCargoCult: true,
-}
-
-// a fake empty response, used when only testing the
-// request for storability
-const emptyResponse = { status: 200, headers: {} }
-
-// returns a plain object representation of the Request
-const requestObject = (request) => {
-  const _obj = {
-    method: request.method,
-    url: request.url,
-    headers: {},
-    compress: request.compress,
-  }
-
-  request.headers.forEach((value, key) => {
-    _obj.headers[key] = value
-  })
-
-  return _obj
-}
-
-// returns a plain object representation of the Response
-const responseObject = (response) => {
-  const _obj = {
-    status: response.status,
-    headers: {},
-  }
-
-  response.headers.forEach((value, key) => {
-    _obj.headers[key] = value
-  })
-
-  return _obj
-}
-
-class CachePolicy {
-  constructor ({ entry, request, response, options }) {
-    this.entry = entry
-    this.request = requestObject(request)
-    this.response = responseObject(response)
-    this.options = options
-    this.policy = new CacheSemantics(this.request, this.response, policyOptions)
-
-    if (this.entry) {
-      // if we have an entry, copy the timestamp to the _responseTime
-      // this is necessary because the CacheSemantics constructor forces
-      // the value to Date.now() which means a policy created from a
-      // cache entry is likely to always identify itself as stale
-      this.policy._responseTime = this.entry.metadata.time
-    }
-  }
-
-  // static method to quickly determine if a request alone is storable
-  static storable (request, options) {
-    // no cachePath means no caching
-    if (!options.cachePath) {
-      return false
-    }
-
-    // user explicitly asked not to cache
-    if (options.cache === 'no-store') {
-      return false
-    }
-
-    // we only cache GET and HEAD requests
-    if (!['GET', 'HEAD'].includes(request.method)) {
-      return false
-    }
-
-    // otherwise, let http-cache-semantics make the decision
-    // based on the request's headers
-    const policy = new CacheSemantics(requestObject(request), emptyResponse, policyOptions)
-    return policy.storable()
-  }
-
-  // returns true if the policy satisfies the request
-  satisfies (request) {
-    const _req = requestObject(request)
-    if (this.request.headers.host !== _req.headers.host) {
-      return false
-    }
-
-    if (this.request.compress !== _req.compress) {
-      return false
-    }
-
-    const negotiatorA = new Negotiator(this.request)
-    const negotiatorB = new Negotiator(_req)
-
-    if (JSON.stringify(negotiatorA.mediaTypes()) !== JSON.stringify(negotiatorB.mediaTypes())) {
-      return false
-    }
-
-    if (JSON.stringify(negotiatorA.languages()) !== JSON.stringify(negotiatorB.languages())) {
-      return false
-    }
-
-    if (JSON.stringify(negotiatorA.encodings()) !== JSON.stringify(negotiatorB.encodings())) {
-      return false
-    }
-
-    if (this.options.integrity) {
-      return ssri.parse(this.options.integrity).match(this.entry.integrity)
-    }
-
-    return true
-  }
-
-  // returns true if the request and response allow caching
-  storable () {
-    return this.policy.storable()
-  }
-
-  // NOTE: this is a hack to avoid parsing the cache-control
-  // header ourselves, it returns true if the response's
-  // cache-control contains must-revalidate
-  get mustRevalidate () {
-    return !!this.policy._rescc['must-revalidate']
-  }
-
-  // returns true if the cached response requires revalidation
-  // for the given request
-  needsRevalidation (request) {
-    const _req = requestObject(request)
-    // force method to GET because we only cache GETs
-    // but can serve a HEAD from a cached GET
-    _req.method = 'GET'
-    return !this.policy.satisfiesWithoutRevalidation(_req)
-  }
-
-  responseHeaders () {
-    return this.policy.responseHeaders()
-  }
-
-  // returns a new object containing the appropriate headers
-  // to send a revalidation request
-  revalidationHeaders (request) {
-    const _req = requestObject(request)
-    return this.policy.revalidationHeaders(_req)
-  }
-
-  // returns true if the request/response was revalidated
-  // successfully. returns false if a new response was received
-  revalidated (request, response) {
-    const _req = requestObject(request)
-    const _res = responseObject(response)
-    const policy = this.policy.revalidatedPolicy(_req, _res)
-    return !policy.modified
-  }
-}
-
-module.exports = CachePolicy
diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/dns.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/dns.js
deleted file mode 100644
index 13102b57c4aa0..0000000000000
--- a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/dns.js
+++ /dev/null
@@ -1,49 +0,0 @@
-const LRUCache = require('lru-cache')
-const dns = require('dns')
-
-const defaultOptions = exports.defaultOptions = {
-  family: undefined,
-  hints: dns.ADDRCONFIG,
-  all: false,
-  verbatim: undefined,
-}
-
-const lookupCache = exports.lookupCache = new LRUCache({ max: 50 })
-
-// this is a factory so that each request can have its own opts (i.e. ttl)
-// while still sharing the cache across all requests
-exports.getLookup = (dnsOptions) => {
-  return (hostname, options, callback) => {
-    if (typeof options === 'function') {
-      callback = options
-      options = null
-    } else if (typeof options === 'number') {
-      options = { family: options }
-    }
-
-    options = { ...defaultOptions, ...options }
-
-    const key = JSON.stringify({
-      hostname,
-      family: options.family,
-      hints: options.hints,
-      all: options.all,
-      verbatim: options.verbatim,
-    })
-
-    if (lookupCache.has(key)) {
-      const [address, family] = lookupCache.get(key)
-      process.nextTick(callback, null, address, family)
-      return
-    }
-
-    dnsOptions.lookup(hostname, options, (err, address, family) => {
-      if (err) {
-        return callback(err)
-      }
-
-      lookupCache.set(key, [address, family], { ttl: dnsOptions.ttl })
-      return callback(null, address, family)
-    })
-  }
-}
diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/fetch.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/fetch.js
deleted file mode 100644
index 233ba67e16550..0000000000000
--- a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/fetch.js
+++ /dev/null
@@ -1,118 +0,0 @@
-'use strict'
-
-const { FetchError, Request, isRedirect } = require('minipass-fetch')
-const url = require('url')
-
-const CachePolicy = require('./cache/policy.js')
-const cache = require('./cache/index.js')
-const remote = require('./remote.js')
-
-// given a Request, a Response and user options
-// return true if the response is a redirect that
-// can be followed. we throw errors that will result
-// in the fetch being rejected if the redirect is
-// possible but invalid for some reason
-const canFollowRedirect = (request, response, options) => {
-  if (!isRedirect(response.status)) {
-    return false
-  }
-
-  if (options.redirect === 'manual') {
-    return false
-  }
-
-  if (options.redirect === 'error') {
-    throw new FetchError(`redirect mode is set to error: ${request.url}`,
-      'no-redirect', { code: 'ENOREDIRECT' })
-  }
-
-  if (!response.headers.has('location')) {
-    throw new FetchError(`redirect location header missing for: ${request.url}`,
-      'no-location', { code: 'EINVALIDREDIRECT' })
-  }
-
-  if (request.counter >= request.follow) {
-    throw new FetchError(`maximum redirect reached at: ${request.url}`,
-      'max-redirect', { code: 'EMAXREDIRECT' })
-  }
-
-  return true
-}
-
-// given a Request, a Response, and the user's options return an object
-// with a new Request and a new options object that will be used for
-// following the redirect
-const getRedirect = (request, response, options) => {
-  const _opts = { ...options }
-  const location = response.headers.get('location')
-  const redirectUrl = new url.URL(location, /^https?:/.test(location) ? undefined : request.url)
-  // Comment below is used under the following license:
-  /**
-   * @license
-   * Copyright (c) 2010-2012 Mikeal Rogers
-   * Licensed under the Apache License, Version 2.0 (the "License");
-   * you may not use this file except in compliance with the License.
-   * You may obtain a copy of the License at
-   * http://www.apache.org/licenses/LICENSE-2.0
-   * Unless required by applicable law or agreed to in writing,
-   * software distributed under the License is distributed on an "AS
-   * IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
-   * express or implied. See the License for the specific language
-   * governing permissions and limitations under the License.
-   */
-
-  // Remove authorization if changing hostnames (but not if just
-  // changing ports or protocols). This matches the behavior of request:
-  // https://github.com/request/request/blob/b12a6245/lib/redirect.js#L134-L138
-  if (new url.URL(request.url).hostname !== redirectUrl.hostname) {
-    request.headers.delete('authorization')
-    request.headers.delete('cookie')
-  }
-
-  // for POST request with 301/302 response, or any request with 303 response,
-  // use GET when following redirect
-  if (
-    response.status === 303 ||
-    (request.method === 'POST' && [301, 302].includes(response.status))
-  ) {
-    _opts.method = 'GET'
-    _opts.body = null
-    request.headers.delete('content-length')
-  }
-
-  _opts.headers = {}
-  request.headers.forEach((value, key) => {
-    _opts.headers[key] = value
-  })
-
-  _opts.counter = ++request.counter
-  const redirectReq = new Request(url.format(redirectUrl), _opts)
-  return {
-    request: redirectReq,
-    options: _opts,
-  }
-}
-
-const fetch = async (request, options) => {
-  const response = CachePolicy.storable(request, options)
-    ? await cache(request, options)
-    : await remote(request, options)
-
-  // if the request wasn't a GET or HEAD, and the response
-  // status is between 200 and 399 inclusive, invalidate the
-  // request url
-  if (!['GET', 'HEAD'].includes(request.method) &&
-      response.status >= 200 &&
-      response.status <= 399) {
-    await cache.invalidate(request, options)
-  }
-
-  if (!canFollowRedirect(request, response, options)) {
-    return response
-  }
-
-  const redirect = getRedirect(request, response, options)
-  return fetch(redirect.request, redirect.options)
-}
-
-module.exports = fetch
diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/index.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/index.js
deleted file mode 100644
index 2f12e8e1b6113..0000000000000
--- a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/index.js
+++ /dev/null
@@ -1,41 +0,0 @@
-const { FetchError, Headers, Request, Response } = require('minipass-fetch')
-
-const configureOptions = require('./options.js')
-const fetch = require('./fetch.js')
-
-const makeFetchHappen = (url, opts) => {
-  const options = configureOptions(opts)
-
-  const request = new Request(url, options)
-  return fetch(request, options)
-}
-
-makeFetchHappen.defaults = (defaultUrl, defaultOptions = {}, wrappedFetch = makeFetchHappen) => {
-  if (typeof defaultUrl === 'object') {
-    defaultOptions = defaultUrl
-    defaultUrl = null
-  }
-
-  const defaultedFetch = (url, options = {}) => {
-    const finalUrl = url || defaultUrl
-    const finalOptions = {
-      ...defaultOptions,
-      ...options,
-      headers: {
-        ...defaultOptions.headers,
-        ...options.headers,
-      },
-    }
-    return wrappedFetch(finalUrl, finalOptions)
-  }
-
-  defaultedFetch.defaults = (defaultUrl1, defaultOptions1 = {}) =>
-    makeFetchHappen.defaults(defaultUrl1, defaultOptions1, defaultedFetch)
-  return defaultedFetch
-}
-
-module.exports = makeFetchHappen
-module.exports.FetchError = FetchError
-module.exports.Headers = Headers
-module.exports.Request = Request
-module.exports.Response = Response
diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/options.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/options.js
deleted file mode 100644
index f77511279f831..0000000000000
--- a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/options.js
+++ /dev/null
@@ -1,54 +0,0 @@
-const dns = require('dns')
-
-const conditionalHeaders = [
-  'if-modified-since',
-  'if-none-match',
-  'if-unmodified-since',
-  'if-match',
-  'if-range',
-]
-
-const configureOptions = (opts) => {
-  const { strictSSL, ...options } = { ...opts }
-  options.method = options.method ? options.method.toUpperCase() : 'GET'
-  options.rejectUnauthorized = strictSSL !== false
-
-  if (!options.retry) {
-    options.retry = { retries: 0 }
-  } else if (typeof options.retry === 'string') {
-    const retries = parseInt(options.retry, 10)
-    if (isFinite(retries)) {
-      options.retry = { retries }
-    } else {
-      options.retry = { retries: 0 }
-    }
-  } else if (typeof options.retry === 'number') {
-    options.retry = { retries: options.retry }
-  } else {
-    options.retry = { retries: 0, ...options.retry }
-  }
-
-  options.dns = { ttl: 5 * 60 * 1000, lookup: dns.lookup, ...options.dns }
-
-  options.cache = options.cache || 'default'
-  if (options.cache === 'default') {
-    const hasConditionalHeader = Object.keys(options.headers || {}).some((name) => {
-      return conditionalHeaders.includes(name.toLowerCase())
-    })
-    if (hasConditionalHeader) {
-      options.cache = 'no-store'
-    }
-  }
-
-  options.cacheAdditionalHeaders = options.cacheAdditionalHeaders || []
-
-  // cacheManager is deprecated, but if it's set and
-  // cachePath is not we should copy it to the new field
-  if (options.cacheManager && !options.cachePath) {
-    options.cachePath = options.cacheManager
-  }
-
-  return options
-}
-
-module.exports = configureOptions
diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/pipeline.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/pipeline.js
deleted file mode 100644
index b1d221b2d0ce3..0000000000000
--- a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/pipeline.js
+++ /dev/null
@@ -1,41 +0,0 @@
-'use strict'
-
-const MinipassPipeline = require('minipass-pipeline')
-
-class CachingMinipassPipeline extends MinipassPipeline {
-  #events = []
-  #data = new Map()
-
-  constructor (opts, ...streams) {
-    // CRITICAL: do NOT pass the streams to the call to super(), this will start
-    // the flow of data and potentially cause the events we need to catch to emit
-    // before we've finished our own setup. instead we call super() with no args,
-    // finish our setup, and then push the streams into ourselves to start the
-    // data flow
-    super()
-    this.#events = opts.events
-
-    /* istanbul ignore next - coverage disabled because this is pointless to test here */
-    if (streams.length) {
-      this.push(...streams)
-    }
-  }
-
-  on (event, handler) {
-    if (this.#events.includes(event) && this.#data.has(event)) {
-      return handler(...this.#data.get(event))
-    }
-
-    return super.on(event, handler)
-  }
-
-  emit (event, ...data) {
-    if (this.#events.includes(event)) {
-      this.#data.set(event, data)
-    }
-
-    return super.emit(event, ...data)
-  }
-}
-
-module.exports = CachingMinipassPipeline
diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/remote.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/remote.js
deleted file mode 100644
index bdbcc79cad908..0000000000000
--- a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/remote.js
+++ /dev/null
@@ -1,121 +0,0 @@
-const { Minipass } = require('minipass')
-const fetch = require('minipass-fetch')
-const promiseRetry = require('promise-retry')
-const ssri = require('ssri')
-
-const CachingMinipassPipeline = require('./pipeline.js')
-const getAgent = require('./agent.js')
-const pkg = require('../package.json')
-
-const USER_AGENT = `${pkg.name}/${pkg.version} (+https://npm.im/${pkg.name})`
-
-const RETRY_ERRORS = [
-  'ECONNRESET', // remote socket closed on us
-  'ECONNREFUSED', // remote host refused to open connection
-  'EADDRINUSE', // failed to bind to a local port (proxy?)
-  'ETIMEDOUT', // someone in the transaction is WAY TOO SLOW
-  'ERR_SOCKET_TIMEOUT', // same as above, but this one comes from agentkeepalive
-  // Known codes we do NOT retry on:
-  // ENOTFOUND (getaddrinfo failure. Either bad hostname, or offline)
-]
-
-const RETRY_TYPES = [
-  'request-timeout',
-]
-
-// make a request directly to the remote source,
-// retrying certain classes of errors as well as
-// following redirects (through the cache if necessary)
-// and verifying response integrity
-const remoteFetch = (request, options) => {
-  const agent = getAgent(request.url, options)
-  if (!request.headers.has('connection')) {
-    request.headers.set('connection', agent ? 'keep-alive' : 'close')
-  }
-
-  if (!request.headers.has('user-agent')) {
-    request.headers.set('user-agent', USER_AGENT)
-  }
-
-  // keep our own options since we're overriding the agent
-  // and the redirect mode
-  const _opts = {
-    ...options,
-    agent,
-    redirect: 'manual',
-  }
-
-  return promiseRetry(async (retryHandler, attemptNum) => {
-    const req = new fetch.Request(request, _opts)
-    try {
-      let res = await fetch(req, _opts)
-      if (_opts.integrity && res.status === 200) {
-        // we got a 200 response and the user has specified an expected
-        // integrity value, so wrap the response in an ssri stream to verify it
-        const integrityStream = ssri.integrityStream({
-          algorithms: _opts.algorithms,
-          integrity: _opts.integrity,
-          size: _opts.size,
-        })
-        const pipeline = new CachingMinipassPipeline({
-          events: ['integrity', 'size'],
-        }, res.body, integrityStream)
-        // we also propagate the integrity and size events out to the pipeline so we can use
-        // this new response body as an integrityEmitter for cacache
-        integrityStream.on('integrity', i => pipeline.emit('integrity', i))
-        integrityStream.on('size', s => pipeline.emit('size', s))
-        res = new fetch.Response(pipeline, res)
-        // set an explicit flag so we know if our response body will emit integrity and size
-        res.body.hasIntegrityEmitter = true
-      }
-
-      res.headers.set('x-fetch-attempts', attemptNum)
-
-      // do not retry POST requests, or requests with a streaming body
-      // do retry requests with a 408, 420, 429 or 500+ status in the response
-      const isStream = Minipass.isStream(req.body)
-      const isRetriable = req.method !== 'POST' &&
-          !isStream &&
-          ([408, 420, 429].includes(res.status) || res.status >= 500)
-
-      if (isRetriable) {
-        if (typeof options.onRetry === 'function') {
-          options.onRetry(res)
-        }
-
-        return retryHandler(res)
-      }
-
-      return res
-    } catch (err) {
-      const code = (err.code === 'EPROMISERETRY')
-        ? err.retried.code
-        : err.code
-
-      // err.retried will be the thing that was thrown from above
-      // if it's a response, we just got a bad status code and we
-      // can re-throw to allow the retry
-      const isRetryError = err.retried instanceof fetch.Response ||
-        (RETRY_ERRORS.includes(code) && RETRY_TYPES.includes(err.type))
-
-      if (req.method === 'POST' || isRetryError) {
-        throw err
-      }
-
-      if (typeof options.onRetry === 'function') {
-        options.onRetry(err)
-      }
-
-      return retryHandler(err)
-    }
-  }, options.retry).catch((err) => {
-    // don't reject for http errors, just return them
-    if (err.status >= 400 && err.type !== 'system') {
-      return err
-    }
-
-    throw err
-  })
-}
-
-module.exports = remoteFetch
diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/package.json b/node_modules/tuf-js/node_modules/make-fetch-happen/package.json
deleted file mode 100644
index fd415dc9966fa..0000000000000
--- a/node_modules/tuf-js/node_modules/make-fetch-happen/package.json
+++ /dev/null
@@ -1,78 +0,0 @@
-{
-  "name": "make-fetch-happen",
-  "version": "11.1.1",
-  "description": "Opinionated, caching, retrying fetch client",
-  "main": "lib/index.js",
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "scripts": {
-    "test": "tap",
-    "posttest": "npm run lint",
-    "eslint": "eslint",
-    "lint": "eslint \"**/*.js\"",
-    "lintfix": "npm run lint -- --fix",
-    "postlint": "template-oss-check",
-    "snap": "tap",
-    "template-oss-apply": "template-oss-apply --force"
-  },
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/npm/make-fetch-happen.git"
-  },
-  "keywords": [
-    "http",
-    "request",
-    "fetch",
-    "mean girls",
-    "caching",
-    "cache",
-    "subresource integrity"
-  ],
-  "author": "GitHub Inc.",
-  "license": "ISC",
-  "dependencies": {
-    "agentkeepalive": "^4.2.1",
-    "cacache": "^17.0.0",
-    "http-cache-semantics": "^4.1.1",
-    "http-proxy-agent": "^5.0.0",
-    "https-proxy-agent": "^5.0.0",
-    "is-lambda": "^1.0.1",
-    "lru-cache": "^7.7.1",
-    "minipass": "^5.0.0",
-    "minipass-fetch": "^3.0.0",
-    "minipass-flush": "^1.0.5",
-    "minipass-pipeline": "^1.2.4",
-    "negotiator": "^0.6.3",
-    "promise-retry": "^2.0.1",
-    "socks-proxy-agent": "^7.0.0",
-    "ssri": "^10.0.0"
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.14.1",
-    "nock": "^13.2.4",
-    "safe-buffer": "^5.2.1",
-    "standard-version": "^9.3.2",
-    "tap": "^16.0.0"
-  },
-  "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-  },
-  "tap": {
-    "color": 1,
-    "files": "test/*.js",
-    "check-coverage": true,
-    "timeout": 60,
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.14.1",
-    "publish": "true"
-  }
-}
diff --git a/node_modules/tuf-js/node_modules/minipass/LICENSE b/node_modules/tuf-js/node_modules/minipass/LICENSE
deleted file mode 100644
index 97f8e32ed82e4..0000000000000
--- a/node_modules/tuf-js/node_modules/minipass/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) 2017-2023 npm, Inc., Isaac Z. Schlueter, and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/tuf-js/node_modules/minipass/index.js b/node_modules/tuf-js/node_modules/minipass/index.js
deleted file mode 100644
index ed07c17acd97b..0000000000000
--- a/node_modules/tuf-js/node_modules/minipass/index.js
+++ /dev/null
@@ -1,702 +0,0 @@
-'use strict'
-const proc =
-  typeof process === 'object' && process
-    ? process
-    : {
-        stdout: null,
-        stderr: null,
-      }
-const EE = require('events')
-const Stream = require('stream')
-const stringdecoder = require('string_decoder')
-const SD = stringdecoder.StringDecoder
-
-const EOF = Symbol('EOF')
-const MAYBE_EMIT_END = Symbol('maybeEmitEnd')
-const EMITTED_END = Symbol('emittedEnd')
-const EMITTING_END = Symbol('emittingEnd')
-const EMITTED_ERROR = Symbol('emittedError')
-const CLOSED = Symbol('closed')
-const READ = Symbol('read')
-const FLUSH = Symbol('flush')
-const FLUSHCHUNK = Symbol('flushChunk')
-const ENCODING = Symbol('encoding')
-const DECODER = Symbol('decoder')
-const FLOWING = Symbol('flowing')
-const PAUSED = Symbol('paused')
-const RESUME = Symbol('resume')
-const BUFFER = Symbol('buffer')
-const PIPES = Symbol('pipes')
-const BUFFERLENGTH = Symbol('bufferLength')
-const BUFFERPUSH = Symbol('bufferPush')
-const BUFFERSHIFT = Symbol('bufferShift')
-const OBJECTMODE = Symbol('objectMode')
-// internal event when stream is destroyed
-const DESTROYED = Symbol('destroyed')
-// internal event when stream has an error
-const ERROR = Symbol('error')
-const EMITDATA = Symbol('emitData')
-const EMITEND = Symbol('emitEnd')
-const EMITEND2 = Symbol('emitEnd2')
-const ASYNC = Symbol('async')
-const ABORT = Symbol('abort')
-const ABORTED = Symbol('aborted')
-const SIGNAL = Symbol('signal')
-
-const defer = fn => Promise.resolve().then(fn)
-
-// TODO remove when Node v8 support drops
-const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1'
-const ASYNCITERATOR =
-  (doIter && Symbol.asyncIterator) || Symbol('asyncIterator not implemented')
-const ITERATOR =
-  (doIter && Symbol.iterator) || Symbol('iterator not implemented')
-
-// events that mean 'the stream is over'
-// these are treated specially, and re-emitted
-// if they are listened for after emitting.
-const isEndish = ev => ev === 'end' || ev === 'finish' || ev === 'prefinish'
-
-const isArrayBuffer = b =>
-  b instanceof ArrayBuffer ||
-  (typeof b === 'object' &&
-    b.constructor &&
-    b.constructor.name === 'ArrayBuffer' &&
-    b.byteLength >= 0)
-
-const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b)
-
-class Pipe {
-  constructor(src, dest, opts) {
-    this.src = src
-    this.dest = dest
-    this.opts = opts
-    this.ondrain = () => src[RESUME]()
-    dest.on('drain', this.ondrain)
-  }
-  unpipe() {
-    this.dest.removeListener('drain', this.ondrain)
-  }
-  // istanbul ignore next - only here for the prototype
-  proxyErrors() {}
-  end() {
-    this.unpipe()
-    if (this.opts.end) this.dest.end()
-  }
-}
-
-class PipeProxyErrors extends Pipe {
-  unpipe() {
-    this.src.removeListener('error', this.proxyErrors)
-    super.unpipe()
-  }
-  constructor(src, dest, opts) {
-    super(src, dest, opts)
-    this.proxyErrors = er => dest.emit('error', er)
-    src.on('error', this.proxyErrors)
-  }
-}
-
-class Minipass extends Stream {
-  constructor(options) {
-    super()
-    this[FLOWING] = false
-    // whether we're explicitly paused
-    this[PAUSED] = false
-    this[PIPES] = []
-    this[BUFFER] = []
-    this[OBJECTMODE] = (options && options.objectMode) || false
-    if (this[OBJECTMODE]) this[ENCODING] = null
-    else this[ENCODING] = (options && options.encoding) || null
-    if (this[ENCODING] === 'buffer') this[ENCODING] = null
-    this[ASYNC] = (options && !!options.async) || false
-    this[DECODER] = this[ENCODING] ? new SD(this[ENCODING]) : null
-    this[EOF] = false
-    this[EMITTED_END] = false
-    this[EMITTING_END] = false
-    this[CLOSED] = false
-    this[EMITTED_ERROR] = null
-    this.writable = true
-    this.readable = true
-    this[BUFFERLENGTH] = 0
-    this[DESTROYED] = false
-    if (options && options.debugExposeBuffer === true) {
-      Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] })
-    }
-    if (options && options.debugExposePipes === true) {
-      Object.defineProperty(this, 'pipes', { get: () => this[PIPES] })
-    }
-    this[SIGNAL] = options && options.signal
-    this[ABORTED] = false
-    if (this[SIGNAL]) {
-      this[SIGNAL].addEventListener('abort', () => this[ABORT]())
-      if (this[SIGNAL].aborted) {
-        this[ABORT]()
-      }
-    }
-  }
-
-  get bufferLength() {
-    return this[BUFFERLENGTH]
-  }
-
-  get encoding() {
-    return this[ENCODING]
-  }
-  set encoding(enc) {
-    if (this[OBJECTMODE]) throw new Error('cannot set encoding in objectMode')
-
-    if (
-      this[ENCODING] &&
-      enc !== this[ENCODING] &&
-      ((this[DECODER] && this[DECODER].lastNeed) || this[BUFFERLENGTH])
-    )
-      throw new Error('cannot change encoding')
-
-    if (this[ENCODING] !== enc) {
-      this[DECODER] = enc ? new SD(enc) : null
-      if (this[BUFFER].length)
-        this[BUFFER] = this[BUFFER].map(chunk => this[DECODER].write(chunk))
-    }
-
-    this[ENCODING] = enc
-  }
-
-  setEncoding(enc) {
-    this.encoding = enc
-  }
-
-  get objectMode() {
-    return this[OBJECTMODE]
-  }
-  set objectMode(om) {
-    this[OBJECTMODE] = this[OBJECTMODE] || !!om
-  }
-
-  get ['async']() {
-    return this[ASYNC]
-  }
-  set ['async'](a) {
-    this[ASYNC] = this[ASYNC] || !!a
-  }
-
-  // drop everything and get out of the flow completely
-  [ABORT]() {
-    this[ABORTED] = true
-    this.emit('abort', this[SIGNAL].reason)
-    this.destroy(this[SIGNAL].reason)
-  }
-
-  get aborted() {
-    return this[ABORTED]
-  }
-  set aborted(_) {}
-
-  write(chunk, encoding, cb) {
-    if (this[ABORTED]) return false
-    if (this[EOF]) throw new Error('write after end')
-
-    if (this[DESTROYED]) {
-      this.emit(
-        'error',
-        Object.assign(
-          new Error('Cannot call write after a stream was destroyed'),
-          { code: 'ERR_STREAM_DESTROYED' }
-        )
-      )
-      return true
-    }
-
-    if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8')
-
-    if (!encoding) encoding = 'utf8'
-
-    const fn = this[ASYNC] ? defer : f => f()
-
-    // convert array buffers and typed array views into buffers
-    // at some point in the future, we may want to do the opposite!
-    // leave strings and buffers as-is
-    // anything else switches us into object mode
-    if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
-      if (isArrayBufferView(chunk))
-        chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength)
-      else if (isArrayBuffer(chunk)) chunk = Buffer.from(chunk)
-      else if (typeof chunk !== 'string')
-        // use the setter so we throw if we have encoding set
-        this.objectMode = true
-    }
-
-    // handle object mode up front, since it's simpler
-    // this yields better performance, fewer checks later.
-    if (this[OBJECTMODE]) {
-      /* istanbul ignore if - maybe impossible? */
-      if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true)
-
-      if (this.flowing) this.emit('data', chunk)
-      else this[BUFFERPUSH](chunk)
-
-      if (this[BUFFERLENGTH] !== 0) this.emit('readable')
-
-      if (cb) fn(cb)
-
-      return this.flowing
-    }
-
-    // at this point the chunk is a buffer or string
-    // don't buffer it up or send it to the decoder
-    if (!chunk.length) {
-      if (this[BUFFERLENGTH] !== 0) this.emit('readable')
-      if (cb) fn(cb)
-      return this.flowing
-    }
-
-    // fast-path writing strings of same encoding to a stream with
-    // an empty buffer, skipping the buffer/decoder dance
-    if (
-      typeof chunk === 'string' &&
-      // unless it is a string already ready for us to use
-      !(encoding === this[ENCODING] && !this[DECODER].lastNeed)
-    ) {
-      chunk = Buffer.from(chunk, encoding)
-    }
-
-    if (Buffer.isBuffer(chunk) && this[ENCODING])
-      chunk = this[DECODER].write(chunk)
-
-    // Note: flushing CAN potentially switch us into not-flowing mode
-    if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true)
-
-    if (this.flowing) this.emit('data', chunk)
-    else this[BUFFERPUSH](chunk)
-
-    if (this[BUFFERLENGTH] !== 0) this.emit('readable')
-
-    if (cb) fn(cb)
-
-    return this.flowing
-  }
-
-  read(n) {
-    if (this[DESTROYED]) return null
-
-    if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) {
-      this[MAYBE_EMIT_END]()
-      return null
-    }
-
-    if (this[OBJECTMODE]) n = null
-
-    if (this[BUFFER].length > 1 && !this[OBJECTMODE]) {
-      if (this.encoding) this[BUFFER] = [this[BUFFER].join('')]
-      else this[BUFFER] = [Buffer.concat(this[BUFFER], this[BUFFERLENGTH])]
-    }
-
-    const ret = this[READ](n || null, this[BUFFER][0])
-    this[MAYBE_EMIT_END]()
-    return ret
-  }
-
-  [READ](n, chunk) {
-    if (n === chunk.length || n === null) this[BUFFERSHIFT]()
-    else {
-      this[BUFFER][0] = chunk.slice(n)
-      chunk = chunk.slice(0, n)
-      this[BUFFERLENGTH] -= n
-    }
-
-    this.emit('data', chunk)
-
-    if (!this[BUFFER].length && !this[EOF]) this.emit('drain')
-
-    return chunk
-  }
-
-  end(chunk, encoding, cb) {
-    if (typeof chunk === 'function') (cb = chunk), (chunk = null)
-    if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8')
-    if (chunk) this.write(chunk, encoding)
-    if (cb) this.once('end', cb)
-    this[EOF] = true
-    this.writable = false
-
-    // if we haven't written anything, then go ahead and emit,
-    // even if we're not reading.
-    // we'll re-emit if a new 'end' listener is added anyway.
-    // This makes MP more suitable to write-only use cases.
-    if (this.flowing || !this[PAUSED]) this[MAYBE_EMIT_END]()
-    return this
-  }
-
-  // don't let the internal resume be overwritten
-  [RESUME]() {
-    if (this[DESTROYED]) return
-
-    this[PAUSED] = false
-    this[FLOWING] = true
-    this.emit('resume')
-    if (this[BUFFER].length) this[FLUSH]()
-    else if (this[EOF]) this[MAYBE_EMIT_END]()
-    else this.emit('drain')
-  }
-
-  resume() {
-    return this[RESUME]()
-  }
-
-  pause() {
-    this[FLOWING] = false
-    this[PAUSED] = true
-  }
-
-  get destroyed() {
-    return this[DESTROYED]
-  }
-
-  get flowing() {
-    return this[FLOWING]
-  }
-
-  get paused() {
-    return this[PAUSED]
-  }
-
-  [BUFFERPUSH](chunk) {
-    if (this[OBJECTMODE]) this[BUFFERLENGTH] += 1
-    else this[BUFFERLENGTH] += chunk.length
-    this[BUFFER].push(chunk)
-  }
-
-  [BUFFERSHIFT]() {
-    if (this[OBJECTMODE]) this[BUFFERLENGTH] -= 1
-    else this[BUFFERLENGTH] -= this[BUFFER][0].length
-    return this[BUFFER].shift()
-  }
-
-  [FLUSH](noDrain) {
-    do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && this[BUFFER].length)
-
-    if (!noDrain && !this[BUFFER].length && !this[EOF]) this.emit('drain')
-  }
-
-  [FLUSHCHUNK](chunk) {
-    this.emit('data', chunk)
-    return this.flowing
-  }
-
-  pipe(dest, opts) {
-    if (this[DESTROYED]) return
-
-    const ended = this[EMITTED_END]
-    opts = opts || {}
-    if (dest === proc.stdout || dest === proc.stderr) opts.end = false
-    else opts.end = opts.end !== false
-    opts.proxyErrors = !!opts.proxyErrors
-
-    // piping an ended stream ends immediately
-    if (ended) {
-      if (opts.end) dest.end()
-    } else {
-      this[PIPES].push(
-        !opts.proxyErrors
-          ? new Pipe(this, dest, opts)
-          : new PipeProxyErrors(this, dest, opts)
-      )
-      if (this[ASYNC]) defer(() => this[RESUME]())
-      else this[RESUME]()
-    }
-
-    return dest
-  }
-
-  unpipe(dest) {
-    const p = this[PIPES].find(p => p.dest === dest)
-    if (p) {
-      this[PIPES].splice(this[PIPES].indexOf(p), 1)
-      p.unpipe()
-    }
-  }
-
-  addListener(ev, fn) {
-    return this.on(ev, fn)
-  }
-
-  on(ev, fn) {
-    const ret = super.on(ev, fn)
-    if (ev === 'data' && !this[PIPES].length && !this.flowing) this[RESUME]()
-    else if (ev === 'readable' && this[BUFFERLENGTH] !== 0)
-      super.emit('readable')
-    else if (isEndish(ev) && this[EMITTED_END]) {
-      super.emit(ev)
-      this.removeAllListeners(ev)
-    } else if (ev === 'error' && this[EMITTED_ERROR]) {
-      if (this[ASYNC]) defer(() => fn.call(this, this[EMITTED_ERROR]))
-      else fn.call(this, this[EMITTED_ERROR])
-    }
-    return ret
-  }
-
-  get emittedEnd() {
-    return this[EMITTED_END]
-  }
-
-  [MAYBE_EMIT_END]() {
-    if (
-      !this[EMITTING_END] &&
-      !this[EMITTED_END] &&
-      !this[DESTROYED] &&
-      this[BUFFER].length === 0 &&
-      this[EOF]
-    ) {
-      this[EMITTING_END] = true
-      this.emit('end')
-      this.emit('prefinish')
-      this.emit('finish')
-      if (this[CLOSED]) this.emit('close')
-      this[EMITTING_END] = false
-    }
-  }
-
-  emit(ev, data, ...extra) {
-    // error and close are only events allowed after calling destroy()
-    if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED])
-      return
-    else if (ev === 'data') {
-      return !this[OBJECTMODE] && !data
-        ? false
-        : this[ASYNC]
-        ? defer(() => this[EMITDATA](data))
-        : this[EMITDATA](data)
-    } else if (ev === 'end') {
-      return this[EMITEND]()
-    } else if (ev === 'close') {
-      this[CLOSED] = true
-      // don't emit close before 'end' and 'finish'
-      if (!this[EMITTED_END] && !this[DESTROYED]) return
-      const ret = super.emit('close')
-      this.removeAllListeners('close')
-      return ret
-    } else if (ev === 'error') {
-      this[EMITTED_ERROR] = data
-      super.emit(ERROR, data)
-      const ret =
-        !this[SIGNAL] || this.listeners('error').length
-          ? super.emit('error', data)
-          : false
-      this[MAYBE_EMIT_END]()
-      return ret
-    } else if (ev === 'resume') {
-      const ret = super.emit('resume')
-      this[MAYBE_EMIT_END]()
-      return ret
-    } else if (ev === 'finish' || ev === 'prefinish') {
-      const ret = super.emit(ev)
-      this.removeAllListeners(ev)
-      return ret
-    }
-
-    // Some other unknown event
-    const ret = super.emit(ev, data, ...extra)
-    this[MAYBE_EMIT_END]()
-    return ret
-  }
-
-  [EMITDATA](data) {
-    for (const p of this[PIPES]) {
-      if (p.dest.write(data) === false) this.pause()
-    }
-    const ret = super.emit('data', data)
-    this[MAYBE_EMIT_END]()
-    return ret
-  }
-
-  [EMITEND]() {
-    if (this[EMITTED_END]) return
-
-    this[EMITTED_END] = true
-    this.readable = false
-    if (this[ASYNC]) defer(() => this[EMITEND2]())
-    else this[EMITEND2]()
-  }
-
-  [EMITEND2]() {
-    if (this[DECODER]) {
-      const data = this[DECODER].end()
-      if (data) {
-        for (const p of this[PIPES]) {
-          p.dest.write(data)
-        }
-        super.emit('data', data)
-      }
-    }
-
-    for (const p of this[PIPES]) {
-      p.end()
-    }
-    const ret = super.emit('end')
-    this.removeAllListeners('end')
-    return ret
-  }
-
-  // const all = await stream.collect()
-  collect() {
-    const buf = []
-    if (!this[OBJECTMODE]) buf.dataLength = 0
-    // set the promise first, in case an error is raised
-    // by triggering the flow here.
-    const p = this.promise()
-    this.on('data', c => {
-      buf.push(c)
-      if (!this[OBJECTMODE]) buf.dataLength += c.length
-    })
-    return p.then(() => buf)
-  }
-
-  // const data = await stream.concat()
-  concat() {
-    return this[OBJECTMODE]
-      ? Promise.reject(new Error('cannot concat in objectMode'))
-      : this.collect().then(buf =>
-          this[OBJECTMODE]
-            ? Promise.reject(new Error('cannot concat in objectMode'))
-            : this[ENCODING]
-            ? buf.join('')
-            : Buffer.concat(buf, buf.dataLength)
-        )
-  }
-
-  // stream.promise().then(() => done, er => emitted error)
-  promise() {
-    return new Promise((resolve, reject) => {
-      this.on(DESTROYED, () => reject(new Error('stream destroyed')))
-      this.on('error', er => reject(er))
-      this.on('end', () => resolve())
-    })
-  }
-
-  // for await (let chunk of stream)
-  [ASYNCITERATOR]() {
-    let stopped = false
-    const stop = () => {
-      this.pause()
-      stopped = true
-      return Promise.resolve({ done: true })
-    }
-    const next = () => {
-      if (stopped) return stop()
-      const res = this.read()
-      if (res !== null) return Promise.resolve({ done: false, value: res })
-
-      if (this[EOF]) return stop()
-
-      let resolve = null
-      let reject = null
-      const onerr = er => {
-        this.removeListener('data', ondata)
-        this.removeListener('end', onend)
-        this.removeListener(DESTROYED, ondestroy)
-        stop()
-        reject(er)
-      }
-      const ondata = value => {
-        this.removeListener('error', onerr)
-        this.removeListener('end', onend)
-        this.removeListener(DESTROYED, ondestroy)
-        this.pause()
-        resolve({ value: value, done: !!this[EOF] })
-      }
-      const onend = () => {
-        this.removeListener('error', onerr)
-        this.removeListener('data', ondata)
-        this.removeListener(DESTROYED, ondestroy)
-        stop()
-        resolve({ done: true })
-      }
-      const ondestroy = () => onerr(new Error('stream destroyed'))
-      return new Promise((res, rej) => {
-        reject = rej
-        resolve = res
-        this.once(DESTROYED, ondestroy)
-        this.once('error', onerr)
-        this.once('end', onend)
-        this.once('data', ondata)
-      })
-    }
-
-    return {
-      next,
-      throw: stop,
-      return: stop,
-      [ASYNCITERATOR]() {
-        return this
-      },
-    }
-  }
-
-  // for (let chunk of stream)
-  [ITERATOR]() {
-    let stopped = false
-    const stop = () => {
-      this.pause()
-      this.removeListener(ERROR, stop)
-      this.removeListener(DESTROYED, stop)
-      this.removeListener('end', stop)
-      stopped = true
-      return { done: true }
-    }
-
-    const next = () => {
-      if (stopped) return stop()
-      const value = this.read()
-      return value === null ? stop() : { value }
-    }
-    this.once('end', stop)
-    this.once(ERROR, stop)
-    this.once(DESTROYED, stop)
-
-    return {
-      next,
-      throw: stop,
-      return: stop,
-      [ITERATOR]() {
-        return this
-      },
-    }
-  }
-
-  destroy(er) {
-    if (this[DESTROYED]) {
-      if (er) this.emit('error', er)
-      else this.emit(DESTROYED)
-      return this
-    }
-
-    this[DESTROYED] = true
-
-    // throw away all buffered data, it's never coming out
-    this[BUFFER].length = 0
-    this[BUFFERLENGTH] = 0
-
-    if (typeof this.close === 'function' && !this[CLOSED]) this.close()
-
-    if (er) this.emit('error', er)
-    // if no error to emit, still reject pending promises
-    else this.emit(DESTROYED)
-
-    return this
-  }
-
-  static isStream(s) {
-    return (
-      !!s &&
-      (s instanceof Minipass ||
-        s instanceof Stream ||
-        (s instanceof EE &&
-          // readable
-          (typeof s.pipe === 'function' ||
-            // writable
-            (typeof s.write === 'function' && typeof s.end === 'function'))))
-    )
-  }
-}
-
-exports.Minipass = Minipass
diff --git a/node_modules/tuf-js/node_modules/minipass/index.mjs b/node_modules/tuf-js/node_modules/minipass/index.mjs
deleted file mode 100644
index 6ef6cd8cf0703..0000000000000
--- a/node_modules/tuf-js/node_modules/minipass/index.mjs
+++ /dev/null
@@ -1,702 +0,0 @@
-'use strict'
-const proc =
-  typeof process === 'object' && process
-    ? process
process - : { - stdout: null, - stderr: null, - } -import EE from 'events' -import Stream from 'stream' -import stringdecoder from 'string_decoder' -const SD = stringdecoder.StringDecoder - -const EOF = Symbol('EOF') -const MAYBE_EMIT_END = Symbol('maybeEmitEnd') -const EMITTED_END = Symbol('emittedEnd') -const EMITTING_END = Symbol('emittingEnd') -const EMITTED_ERROR = Symbol('emittedError') -const CLOSED = Symbol('closed') -const READ = Symbol('read') -const FLUSH = Symbol('flush') -const FLUSHCHUNK = Symbol('flushChunk') -const ENCODING = Symbol('encoding') -const DECODER = Symbol('decoder') -const FLOWING = Symbol('flowing') -const PAUSED = Symbol('paused') -const RESUME = Symbol('resume') -const BUFFER = Symbol('buffer') -const PIPES = Symbol('pipes') -const BUFFERLENGTH = Symbol('bufferLength') -const BUFFERPUSH = Symbol('bufferPush') -const BUFFERSHIFT = Symbol('bufferShift') -const OBJECTMODE = Symbol('objectMode') -// internal event when stream is destroyed -const DESTROYED = Symbol('destroyed') -// internal event when stream has an error -const ERROR = Symbol('error') -const EMITDATA = Symbol('emitData') -const EMITEND = Symbol('emitEnd') -const EMITEND2 = Symbol('emitEnd2') -const ASYNC = Symbol('async') -const ABORT = Symbol('abort') -const ABORTED = Symbol('aborted') -const SIGNAL = Symbol('signal') - -const defer = fn => Promise.resolve().then(fn) - -// TODO remove when Node v8 support drops -const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1' -const ASYNCITERATOR = - (doIter && Symbol.asyncIterator) || Symbol('asyncIterator not implemented') -const ITERATOR = - (doIter && Symbol.iterator) || Symbol('iterator not implemented') - -// events that mean 'the stream is over' -// these are treated specially, and re-emitted -// if they are listened for after emitting. -const isEndish = ev => ev === 'end' || ev === 'finish' || ev === 'prefinish' - -const isArrayBuffer = b => - b instanceof ArrayBuffer || - (typeof b === 'object' && - b.constructor && - b.constructor.name === 'ArrayBuffer' && - b.byteLength >= 0) - -const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b) - -class Pipe { - constructor(src, dest, opts) { - this.src = src - this.dest = dest - this.opts = opts - this.ondrain = () => src[RESUME]() - dest.on('drain', this.ondrain) - } - unpipe() { - this.dest.removeListener('drain', this.ondrain) - } - // istanbul ignore next - only here for the prototype - proxyErrors() {} - end() { - this.unpipe() - if (this.opts.end) this.dest.end() - } -} - -class PipeProxyErrors extends Pipe { - unpipe() { - this.src.removeListener('error', this.proxyErrors) - super.unpipe() - } - constructor(src, dest, opts) { - super(src, dest, opts) - this.proxyErrors = er => dest.emit('error', er) - src.on('error', this.proxyErrors) - } -} - -export class Minipass extends Stream { - constructor(options) { - super() - this[FLOWING] = false - // whether we're explicitly paused - this[PAUSED] = false - this[PIPES] = [] - this[BUFFER] = [] - this[OBJECTMODE] = (options && options.objectMode) || false - if (this[OBJECTMODE]) this[ENCODING] = null - else this[ENCODING] = (options && options.encoding) || null - if (this[ENCODING] === 'buffer') this[ENCODING] = null - this[ASYNC] = (options && !!options.async) || false - this[DECODER] = this[ENCODING] ? 
new SD(this[ENCODING]) : null - this[EOF] = false - this[EMITTED_END] = false - this[EMITTING_END] = false - this[CLOSED] = false - this[EMITTED_ERROR] = null - this.writable = true - this.readable = true - this[BUFFERLENGTH] = 0 - this[DESTROYED] = false - if (options && options.debugExposeBuffer === true) { - Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] }) - } - if (options && options.debugExposePipes === true) { - Object.defineProperty(this, 'pipes', { get: () => this[PIPES] }) - } - this[SIGNAL] = options && options.signal - this[ABORTED] = false - if (this[SIGNAL]) { - this[SIGNAL].addEventListener('abort', () => this[ABORT]()) - if (this[SIGNAL].aborted) { - this[ABORT]() - } - } - } - - get bufferLength() { - return this[BUFFERLENGTH] - } - - get encoding() { - return this[ENCODING] - } - set encoding(enc) { - if (this[OBJECTMODE]) throw new Error('cannot set encoding in objectMode') - - if ( - this[ENCODING] && - enc !== this[ENCODING] && - ((this[DECODER] && this[DECODER].lastNeed) || this[BUFFERLENGTH]) - ) - throw new Error('cannot change encoding') - - if (this[ENCODING] !== enc) { - this[DECODER] = enc ? new SD(enc) : null - if (this[BUFFER].length) - this[BUFFER] = this[BUFFER].map(chunk => this[DECODER].write(chunk)) - } - - this[ENCODING] = enc - } - - setEncoding(enc) { - this.encoding = enc - } - - get objectMode() { - return this[OBJECTMODE] - } - set objectMode(om) { - this[OBJECTMODE] = this[OBJECTMODE] || !!om - } - - get ['async']() { - return this[ASYNC] - } - set ['async'](a) { - this[ASYNC] = this[ASYNC] || !!a - } - - // drop everything and get out of the flow completely - [ABORT]() { - this[ABORTED] = true - this.emit('abort', this[SIGNAL].reason) - this.destroy(this[SIGNAL].reason) - } - - get aborted() { - return this[ABORTED] - } - set aborted(_) {} - - write(chunk, encoding, cb) { - if (this[ABORTED]) return false - if (this[EOF]) throw new Error('write after end') - - if (this[DESTROYED]) { - this.emit( - 'error', - Object.assign( - new Error('Cannot call write after a stream was destroyed'), - { code: 'ERR_STREAM_DESTROYED' } - ) - ) - return true - } - - if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8') - - if (!encoding) encoding = 'utf8' - - const fn = this[ASYNC] ? defer : f => f() - - // convert array buffers and typed array views into buffers - // at some point in the future, we may want to do the opposite! - // leave strings and buffers as-is - // anything else switches us into object mode - if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) { - if (isArrayBufferView(chunk)) - chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength) - else if (isArrayBuffer(chunk)) chunk = Buffer.from(chunk) - else if (typeof chunk !== 'string') - // use the setter so we throw if we have encoding set - this.objectMode = true - } - - // handle object mode up front, since it's simpler - // this yields better performance, fewer checks later. - if (this[OBJECTMODE]) { - /* istanbul ignore if - maybe impossible? 
*/ - if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true) - - if (this.flowing) this.emit('data', chunk) - else this[BUFFERPUSH](chunk) - - if (this[BUFFERLENGTH] !== 0) this.emit('readable') - - if (cb) fn(cb) - - return this.flowing - } - - // at this point the chunk is a buffer or string - // don't buffer it up or send it to the decoder - if (!chunk.length) { - if (this[BUFFERLENGTH] !== 0) this.emit('readable') - if (cb) fn(cb) - return this.flowing - } - - // fast-path writing strings of same encoding to a stream with - // an empty buffer, skipping the buffer/decoder dance - if ( - typeof chunk === 'string' && - // unless it is a string already ready for us to use - !(encoding === this[ENCODING] && !this[DECODER].lastNeed) - ) { - chunk = Buffer.from(chunk, encoding) - } - - if (Buffer.isBuffer(chunk) && this[ENCODING]) - chunk = this[DECODER].write(chunk) - - // Note: flushing CAN potentially switch us into not-flowing mode - if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true) - - if (this.flowing) this.emit('data', chunk) - else this[BUFFERPUSH](chunk) - - if (this[BUFFERLENGTH] !== 0) this.emit('readable') - - if (cb) fn(cb) - - return this.flowing - } - - read(n) { - if (this[DESTROYED]) return null - - if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) { - this[MAYBE_EMIT_END]() - return null - } - - if (this[OBJECTMODE]) n = null - - if (this[BUFFER].length > 1 && !this[OBJECTMODE]) { - if (this.encoding) this[BUFFER] = [this[BUFFER].join('')] - else this[BUFFER] = [Buffer.concat(this[BUFFER], this[BUFFERLENGTH])] - } - - const ret = this[READ](n || null, this[BUFFER][0]) - this[MAYBE_EMIT_END]() - return ret - } - - [READ](n, chunk) { - if (n === chunk.length || n === null) this[BUFFERSHIFT]() - else { - this[BUFFER][0] = chunk.slice(n) - chunk = chunk.slice(0, n) - this[BUFFERLENGTH] -= n - } - - this.emit('data', chunk) - - if (!this[BUFFER].length && !this[EOF]) this.emit('drain') - - return chunk - } - - end(chunk, encoding, cb) { - if (typeof chunk === 'function') (cb = chunk), (chunk = null) - if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8') - if (chunk) this.write(chunk, encoding) - if (cb) this.once('end', cb) - this[EOF] = true - this.writable = false - - // if we haven't written anything, then go ahead and emit, - // even if we're not reading. - // we'll re-emit if a new 'end' listener is added anyway. - // This makes MP more suitable to write-only use cases. 
- if (this.flowing || !this[PAUSED]) this[MAYBE_EMIT_END]() - return this - } - - // don't let the internal resume be overwritten - [RESUME]() { - if (this[DESTROYED]) return - - this[PAUSED] = false - this[FLOWING] = true - this.emit('resume') - if (this[BUFFER].length) this[FLUSH]() - else if (this[EOF]) this[MAYBE_EMIT_END]() - else this.emit('drain') - } - - resume() { - return this[RESUME]() - } - - pause() { - this[FLOWING] = false - this[PAUSED] = true - } - - get destroyed() { - return this[DESTROYED] - } - - get flowing() { - return this[FLOWING] - } - - get paused() { - return this[PAUSED] - } - - [BUFFERPUSH](chunk) { - if (this[OBJECTMODE]) this[BUFFERLENGTH] += 1 - else this[BUFFERLENGTH] += chunk.length - this[BUFFER].push(chunk) - } - - [BUFFERSHIFT]() { - if (this[OBJECTMODE]) this[BUFFERLENGTH] -= 1 - else this[BUFFERLENGTH] -= this[BUFFER][0].length - return this[BUFFER].shift() - } - - [FLUSH](noDrain) { - do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && this[BUFFER].length) - - if (!noDrain && !this[BUFFER].length && !this[EOF]) this.emit('drain') - } - - [FLUSHCHUNK](chunk) { - this.emit('data', chunk) - return this.flowing - } - - pipe(dest, opts) { - if (this[DESTROYED]) return - - const ended = this[EMITTED_END] - opts = opts || {} - if (dest === proc.stdout || dest === proc.stderr) opts.end = false - else opts.end = opts.end !== false - opts.proxyErrors = !!opts.proxyErrors - - // piping an ended stream ends immediately - if (ended) { - if (opts.end) dest.end() - } else { - this[PIPES].push( - !opts.proxyErrors - ? new Pipe(this, dest, opts) - : new PipeProxyErrors(this, dest, opts) - ) - if (this[ASYNC]) defer(() => this[RESUME]()) - else this[RESUME]() - } - - return dest - } - - unpipe(dest) { - const p = this[PIPES].find(p => p.dest === dest) - if (p) { - this[PIPES].splice(this[PIPES].indexOf(p), 1) - p.unpipe() - } - } - - addListener(ev, fn) { - return this.on(ev, fn) - } - - on(ev, fn) { - const ret = super.on(ev, fn) - if (ev === 'data' && !this[PIPES].length && !this.flowing) this[RESUME]() - else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) - super.emit('readable') - else if (isEndish(ev) && this[EMITTED_END]) { - super.emit(ev) - this.removeAllListeners(ev) - } else if (ev === 'error' && this[EMITTED_ERROR]) { - if (this[ASYNC]) defer(() => fn.call(this, this[EMITTED_ERROR])) - else fn.call(this, this[EMITTED_ERROR]) - } - return ret - } - - get emittedEnd() { - return this[EMITTED_END] - } - - [MAYBE_EMIT_END]() { - if ( - !this[EMITTING_END] && - !this[EMITTED_END] && - !this[DESTROYED] && - this[BUFFER].length === 0 && - this[EOF] - ) { - this[EMITTING_END] = true - this.emit('end') - this.emit('prefinish') - this.emit('finish') - if (this[CLOSED]) this.emit('close') - this[EMITTING_END] = false - } - } - - emit(ev, data, ...extra) { - // error and close are only events allowed after calling destroy() - if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED]) - return - else if (ev === 'data') { - return !this[OBJECTMODE] && !data - ? false - : this[ASYNC] - ? 
defer(() => this[EMITDATA](data)) - : this[EMITDATA](data) - } else if (ev === 'end') { - return this[EMITEND]() - } else if (ev === 'close') { - this[CLOSED] = true - // don't emit close before 'end' and 'finish' - if (!this[EMITTED_END] && !this[DESTROYED]) return - const ret = super.emit('close') - this.removeAllListeners('close') - return ret - } else if (ev === 'error') { - this[EMITTED_ERROR] = data - super.emit(ERROR, data) - const ret = - !this[SIGNAL] || this.listeners('error').length - ? super.emit('error', data) - : false - this[MAYBE_EMIT_END]() - return ret - } else if (ev === 'resume') { - const ret = super.emit('resume') - this[MAYBE_EMIT_END]() - return ret - } else if (ev === 'finish' || ev === 'prefinish') { - const ret = super.emit(ev) - this.removeAllListeners(ev) - return ret - } - - // Some other unknown event - const ret = super.emit(ev, data, ...extra) - this[MAYBE_EMIT_END]() - return ret - } - - [EMITDATA](data) { - for (const p of this[PIPES]) { - if (p.dest.write(data) === false) this.pause() - } - const ret = super.emit('data', data) - this[MAYBE_EMIT_END]() - return ret - } - - [EMITEND]() { - if (this[EMITTED_END]) return - - this[EMITTED_END] = true - this.readable = false - if (this[ASYNC]) defer(() => this[EMITEND2]()) - else this[EMITEND2]() - } - - [EMITEND2]() { - if (this[DECODER]) { - const data = this[DECODER].end() - if (data) { - for (const p of this[PIPES]) { - p.dest.write(data) - } - super.emit('data', data) - } - } - - for (const p of this[PIPES]) { - p.end() - } - const ret = super.emit('end') - this.removeAllListeners('end') - return ret - } - - // const all = await stream.collect() - collect() { - const buf = [] - if (!this[OBJECTMODE]) buf.dataLength = 0 - // set the promise first, in case an error is raised - // by triggering the flow here. - const p = this.promise() - this.on('data', c => { - buf.push(c) - if (!this[OBJECTMODE]) buf.dataLength += c.length - }) - return p.then(() => buf) - } - - // const data = await stream.concat() - concat() { - return this[OBJECTMODE] - ? Promise.reject(new Error('cannot concat in objectMode')) - : this.collect().then(buf => - this[OBJECTMODE] - ? Promise.reject(new Error('cannot concat in objectMode')) - : this[ENCODING] - ? 
buf.join('') - : Buffer.concat(buf, buf.dataLength) - ) - } - - // stream.promise().then(() => done, er => emitted error) - promise() { - return new Promise((resolve, reject) => { - this.on(DESTROYED, () => reject(new Error('stream destroyed'))) - this.on('error', er => reject(er)) - this.on('end', () => resolve()) - }) - } - - // for await (let chunk of stream) - [ASYNCITERATOR]() { - let stopped = false - const stop = () => { - this.pause() - stopped = true - return Promise.resolve({ done: true }) - } - const next = () => { - if (stopped) return stop() - const res = this.read() - if (res !== null) return Promise.resolve({ done: false, value: res }) - - if (this[EOF]) return stop() - - let resolve = null - let reject = null - const onerr = er => { - this.removeListener('data', ondata) - this.removeListener('end', onend) - this.removeListener(DESTROYED, ondestroy) - stop() - reject(er) - } - const ondata = value => { - this.removeListener('error', onerr) - this.removeListener('end', onend) - this.removeListener(DESTROYED, ondestroy) - this.pause() - resolve({ value: value, done: !!this[EOF] }) - } - const onend = () => { - this.removeListener('error', onerr) - this.removeListener('data', ondata) - this.removeListener(DESTROYED, ondestroy) - stop() - resolve({ done: true }) - } - const ondestroy = () => onerr(new Error('stream destroyed')) - return new Promise((res, rej) => { - reject = rej - resolve = res - this.once(DESTROYED, ondestroy) - this.once('error', onerr) - this.once('end', onend) - this.once('data', ondata) - }) - } - - return { - next, - throw: stop, - return: stop, - [ASYNCITERATOR]() { - return this - }, - } - } - - // for (let chunk of stream) - [ITERATOR]() { - let stopped = false - const stop = () => { - this.pause() - this.removeListener(ERROR, stop) - this.removeListener(DESTROYED, stop) - this.removeListener('end', stop) - stopped = true - return { done: true } - } - - const next = () => { - if (stopped) return stop() - const value = this.read() - return value === null ? 
stop() : { value }
- }
- this.once('end', stop)
- this.once(ERROR, stop)
- this.once(DESTROYED, stop)
-
- return {
- next,
- throw: stop,
- return: stop,
- [ITERATOR]() {
- return this
- },
- }
- }
-
- destroy(er) {
- if (this[DESTROYED]) {
- if (er) this.emit('error', er)
- else this.emit(DESTROYED)
- return this
- }
-
- this[DESTROYED] = true
-
- // throw away all buffered data, it's never coming out
- this[BUFFER].length = 0
- this[BUFFERLENGTH] = 0
-
- if (typeof this.close === 'function' && !this[CLOSED]) this.close()
-
- if (er) this.emit('error', er)
- // if no error to emit, still reject pending promises
- else this.emit(DESTROYED)
-
- return this
- }
-
- static isStream(s) {
- return (
- !!s &&
- (s instanceof Minipass ||
- s instanceof Stream ||
- (s instanceof EE &&
- // readable
- (typeof s.pipe === 'function' ||
- // writable
- (typeof s.write === 'function' && typeof s.end === 'function'))))
- )
- }
-}
-
-
diff --git a/node_modules/tuf-js/node_modules/minipass/package.json b/node_modules/tuf-js/node_modules/minipass/package.json
deleted file mode 100644
index 0e20e988047f2..0000000000000
--- a/node_modules/tuf-js/node_modules/minipass/package.json
+++ /dev/null
@@ -1,76 +0,0 @@
-{
- "name": "minipass",
- "version": "5.0.0",
- "description": "minimal implementation of a PassThrough stream",
- "main": "./index.js",
- "module": "./index.mjs",
- "types": "./index.d.ts",
- "exports": {
- ".": {
- "import": {
- "types": "./index.d.ts",
- "default": "./index.mjs"
- },
- "require": {
- "types": "./index.d.ts",
- "default": "./index.js"
- }
- },
- "./package.json": "./package.json"
- },
- "devDependencies": {
- "@types/node": "^17.0.41",
- "end-of-stream": "^1.4.0",
- "node-abort-controller": "^3.1.1",
- "prettier": "^2.6.2",
- "tap": "^16.2.0",
- "through2": "^2.0.3",
- "ts-node": "^10.8.1",
- "typedoc": "^0.23.24",
- "typescript": "^4.7.3"
- },
- "scripts": {
- "pretest": "npm run prepare",
- "presnap": "npm run prepare",
- "prepare": "node ./scripts/transpile-to-esm.js",
- "snap": "tap",
- "test": "tap",
- "preversion": "npm test",
- "postversion": "npm publish",
- "postpublish": "git push origin --follow-tags",
- "typedoc": "typedoc ./index.d.ts",
- "format": "prettier --write . --loglevel warn"
- },
- "repository": {
- "type": "git",
- "url": "git+https://github.com/isaacs/minipass.git"
- },
- "keywords": [
- "passthrough",
- "stream"
- ],
- "author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)",
- "license": "ISC",
- "files": [
- "index.d.ts",
- "index.js",
- "index.mjs"
- ],
- "tap": {
- "check-coverage": true
- },
- "engines": {
- "node": ">=8"
- },
- "prettier": {
- "semi": false,
- "printWidth": 80,
- "tabWidth": 2,
- "useTabs": false,
- "singleQuote": true,
- "jsxSingleQuote": false,
- "bracketSameLine": true,
- "arrowParens": "avoid",
- "endOfLine": "lf"
- }
-}
diff --git a/node_modules/tuf-js/package.json b/node_modules/tuf-js/package.json
index 9187d88083272..6286e034453d6 100644
--- a/node_modules/tuf-js/package.json
+++ b/node_modules/tuf-js/package.json
@@ -1,6 +1,6 @@
 {
 "name": "tuf-js",
- "version": "1.1.7",
+ "version": "2.0.0",
 "description": "JavaScript implementation of The Update Framework (TUF)",
 "main": "dist/index.js",
 "types": "dist/index.d.ts",
@@ -28,19 +28,16 @@
 },
 "homepage": "https://github.com/theupdateframework/tuf-js/tree/main/packages/client#readme",
 "devDependencies": {
- "@tufjs/repo-mock": "1.3.1",
+ "@tufjs/repo-mock": "2.0.0",
 "@types/debug": "^4.1.8",
- "@types/make-fetch-happen": "^10.0.1",
- "@types/node": "^20.2.5",
- "nock": "^13.3.1",
- "typescript": "^5.1.3"
+ "@types/make-fetch-happen": "^10.0.1"
 },
 "dependencies": {
- "@tufjs/models": "1.0.4",
+ "@tufjs/models": "2.0.0",
 "debug": "^4.3.4",
- "make-fetch-happen": "^11.1.1"
+ "make-fetch-happen": "^13.0.0"
 },
 "engines": {
- "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+ "node": "^16.14.0 || >=18.0.0"
 }
}
diff --git a/package-lock.json b/package-lock.json
index 2144542b2f5f8..43b0ba6054880 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -16,6 +16,7 @@
 "@npmcli/package-json",
 "@npmcli/promise-spawn",
 "@npmcli/run-script",
+ "@sigstore/tuf",
 "abbrev",
 "archy",
 "cacache",
@@ -66,7 +67,6 @@
 "qrcode-terminal",
 "read",
 "semver",
- "sigstore",
 "ssri",
 "supports-color",
 "tar",
@@ -94,6 +94,7 @@
 "@npmcli/package-json": "^5.0.0",
 "@npmcli/promise-spawn": "^6.0.2",
 "@npmcli/run-script": "^6.0.2",
+ "@sigstore/tuf": "^2.0.0",
 "abbrev": "^2.0.0",
 "archy": "~1.0.0",
 "cacache": "^18.0.0",
@@ -144,7 +145,6 @@
 "qrcode-terminal": "^0.12.0",
 "read": "^2.1.0",
 "semver": "^7.5.4",
- "sigstore": "^1.7.0",
 "ssri": "^10.0.5",
 "supports-color": "^9.4.0",
 "tar": "^6.1.15",
@@ -166,7 +166,7 @@
 "@npmcli/mock-globals": "^1.0.0",
 "@npmcli/mock-registry": "^1.0.0",
 "@npmcli/template-oss": "4.18.0",
- "@tufjs/repo-mock": "^1.3.1",
+ "@tufjs/repo-mock": "^2.0.0",
 "diff": "^5.1.0",
 "licensee": "^10.0.0",
 "nock": "^13.3.3",
@@ -2847,6 +2847,25 @@
 "node": ">=14"
 }
 },
+ "node_modules/@sigstore/bundle": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/@sigstore/bundle/-/bundle-2.0.0.tgz",
+ "integrity": "sha512-EO7D7/kMtUsYn596WP+b5N/txWTgOt7N8vsZ2gyneMsxfrPW4FJHRZtMlZeGKCgBNCcjZhZ8ItyawkZqJC8XiA==",
+ "dependencies": {
+ "@sigstore/protobuf-specs": "^0.2.1"
+ },
+ "engines": {
+ "node": "^16.14.0 || >=18.0.0"
+ }
+ },
+ "node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs": {
+ "version": "0.2.1",
+ "resolved": "https://registry.npmjs.org/@sigstore/protobuf-specs/-/protobuf-specs-0.2.1.tgz",
+ "integrity": "sha512-XTWVxnWJu+c1oCshMLwnKvz8ZQJJDVOlciMfgpJBQbThVjKTCG8dwyhgLngBD2KN0ap9F/gOV8rFDEx8uh7R2A==",
+ "engines": {
+ "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+ }
+ },
 "node_modules/@sigstore/protobuf-specs": {
 "version": "0.1.0",
 "resolved": "https://registry.npmjs.org/@sigstore/protobuf-specs/-/protobuf-specs-0.1.0.tgz",
@@ -2856,15 +2875,45 @@
 "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
 }
 },
+ "node_modules/@sigstore/sign": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/@sigstore/sign/-/sign-2.0.0.tgz", + "integrity": "sha512-f+r1jEDwM5969DTORRln9sDmWjTy1cOQzhU/iisGNzFdbF2TglmwNScbH6aiQ6QH4lc3jOXNMgKP6sec1kSVKA==", + "dependencies": { + "@sigstore/bundle": "^2.0.0", + "@sigstore/protobuf-specs": "^0.2.1", + "make-fetch-happen": "^13.0.0" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/@sigstore/protobuf-specs/-/protobuf-specs-0.2.1.tgz", + "integrity": "sha512-XTWVxnWJu+c1oCshMLwnKvz8ZQJJDVOlciMfgpJBQbThVjKTCG8dwyhgLngBD2KN0ap9F/gOV8rFDEx8uh7R2A==", + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, "node_modules/@sigstore/tuf": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/@sigstore/tuf/-/tuf-1.0.2.tgz", - "integrity": "sha512-vjwcYePJzM01Ha6oWWZ9gNcdIgnzyFxfqfWzph483DPJTH8Tb7f7bQRRll3CYVkyH56j0AgcPAcl6Vg95DPF+Q==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@sigstore/tuf/-/tuf-2.0.0.tgz", + "integrity": "sha512-Ow/ZMFH9kdHbMNOH//rDuINblqufpqD+e3xS9JY5RRce+euh9eUsjSc6jodioMMi2roN9rSAk8LCuyW2hngAKw==", "inBundle": true, "dependencies": { - "@sigstore/protobuf-specs": "^0.1.0", - "tuf-js": "^1.1.7" + "@sigstore/protobuf-specs": "^0.2.1", + "tuf-js": "^2.0.0" }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/@sigstore/protobuf-specs/-/protobuf-specs-0.2.1.tgz", + "integrity": "sha512-XTWVxnWJu+c1oCshMLwnKvz8ZQJJDVOlciMfgpJBQbThVjKTCG8dwyhgLngBD2KN0ap9F/gOV8rFDEx8uh7R2A==", + "inBundle": true, "engines": { "node": "^14.17.0 || ^16.13.0 || >=18.0.0" } @@ -2903,38 +2952,38 @@ "dev": true }, "node_modules/@tufjs/canonical-json": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@tufjs/canonical-json/-/canonical-json-1.0.0.tgz", - "integrity": "sha512-QTnf++uxunWvG2z3UFNzAoQPHxnSXOwtaI3iJ+AohhV+5vONuArPjJE7aPXPVXfXJsqrVbZBu9b81AJoSd09IQ==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@tufjs/canonical-json/-/canonical-json-2.0.0.tgz", + "integrity": "sha512-yVtV8zsdo8qFHe+/3kw81dSLyF7D576A5cCFCi4X7B39tWT7SekaEFUnvnWJHz+9qO7qJTah1JbrDjWKqFtdWA==", "inBundle": true, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.14.0 || >=18.0.0" } }, "node_modules/@tufjs/models": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@tufjs/models/-/models-1.0.4.tgz", - "integrity": "sha512-qaGV9ltJP0EO25YfFUPhxRVK0evXFIAGicsVXuRim4Ed9cjPxYhNnNJ49SFmbeLgtxpslIkX317IgpfcHPVj/A==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@tufjs/models/-/models-2.0.0.tgz", + "integrity": "sha512-c8nj8BaOExmZKO2DXhDfegyhSGcG9E/mPN3U13L+/PsoWm1uaGiHHjxqSHQiasDBQwDA3aHuw9+9spYAP1qvvg==", "inBundle": true, "dependencies": { - "@tufjs/canonical-json": "1.0.0", - "minimatch": "^9.0.0" + "@tufjs/canonical-json": "2.0.0", + "minimatch": "^9.0.3" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.14.0 || >=18.0.0" } }, "node_modules/@tufjs/repo-mock": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/@tufjs/repo-mock/-/repo-mock-1.3.1.tgz", - "integrity": "sha512-7IDezQbPGReWD3xmgR2pAfG61BZpvW51XnB87OfuiJOe5mkGnziCTTGITtUC3A6htQr9shkk5qIKrhpoMXBwpQ==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@tufjs/repo-mock/-/repo-mock-2.0.0.tgz", + "integrity": 
"sha512-z0OtgGBn05dhpcvLaOMQPG1V16zOAL8+2A/L6EGIUE8pOP6PMX6vlJGSsN9JQ9glakXupCN4fdX8JF9XrNYUrw==", "dev": true, "dependencies": { - "@tufjs/models": "1.0.4", - "nock": "^13.3.1" + "@tufjs/models": "2.0.0", + "nock": "^13.3.3" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.14.0 || >=18.0.0" } }, "node_modules/@types/debug": { @@ -11723,6 +11772,50 @@ "node": "^14.17.0 || ^16.13.0 || >=18.0.0" } }, + "node_modules/sigstore/node_modules/@sigstore/tuf": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@sigstore/tuf/-/tuf-1.0.3.tgz", + "integrity": "sha512-2bRovzs0nJZFlCN3rXirE4gwxCn97JNjMmwpecqlbgV9WcxX7WRuIrgzx/X7Ib7MYRbyUTpBYE0s2x6AmZXnlg==", + "inBundle": true, + "dependencies": { + "@sigstore/protobuf-specs": "^0.2.0", + "tuf-js": "^1.1.7" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/@sigstore/protobuf-specs/-/protobuf-specs-0.2.1.tgz", + "integrity": "sha512-XTWVxnWJu+c1oCshMLwnKvz8ZQJJDVOlciMfgpJBQbThVjKTCG8dwyhgLngBD2KN0ap9F/gOV8rFDEx8uh7R2A==", + "inBundle": true, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/sigstore/node_modules/@tufjs/canonical-json": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@tufjs/canonical-json/-/canonical-json-1.0.0.tgz", + "integrity": "sha512-QTnf++uxunWvG2z3UFNzAoQPHxnSXOwtaI3iJ+AohhV+5vONuArPjJE7aPXPVXfXJsqrVbZBu9b81AJoSd09IQ==", + "inBundle": true, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/sigstore/node_modules/@tufjs/models": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@tufjs/models/-/models-1.0.4.tgz", + "integrity": "sha512-qaGV9ltJP0EO25YfFUPhxRVK0evXFIAGicsVXuRim4Ed9cjPxYhNnNJ49SFmbeLgtxpslIkX317IgpfcHPVj/A==", + "inBundle": true, + "dependencies": { + "@tufjs/canonical-json": "1.0.0", + "minimatch": "^9.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, "node_modules/sigstore/node_modules/cacache": { "version": "17.1.4", "resolved": "https://registry.npmjs.org/cacache/-/cacache-17.1.4.tgz", @@ -11799,6 +11892,20 @@ "node": ">=8" } }, + "node_modules/sigstore/node_modules/tuf-js": { + "version": "1.1.7", + "resolved": "https://registry.npmjs.org/tuf-js/-/tuf-js-1.1.7.tgz", + "integrity": "sha512-i3P9Kgw3ytjELUfpuKVDNBJvk4u5bXL6gskv572mcevPbSKCV3zt3djhmlEQ65yERjIbOSncy7U4cQJaB1CBCg==", + "inBundle": true, + "dependencies": { + "@tufjs/models": "1.0.4", + "debug": "^4.3.4", + "make-fetch-happen": "^11.1.1" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, "node_modules/slash": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", @@ -14912,93 +15019,17 @@ } }, "node_modules/tuf-js": { - "version": "1.1.7", - "resolved": "https://registry.npmjs.org/tuf-js/-/tuf-js-1.1.7.tgz", - "integrity": "sha512-i3P9Kgw3ytjELUfpuKVDNBJvk4u5bXL6gskv572mcevPbSKCV3zt3djhmlEQ65yERjIbOSncy7U4cQJaB1CBCg==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/tuf-js/-/tuf-js-2.0.0.tgz", + "integrity": "sha512-Oq6w0MMFihvxCM0o733TIeLeuUrDuaVaOEUVXrQtq/J6YXoUmQU84JcAftJcDkxDkuTZ9jumZN7Dh7VlyNaeWA==", "inBundle": true, "dependencies": { - "@tufjs/models": "1.0.4", + "@tufjs/models": "2.0.0", "debug": "^4.3.4", - "make-fetch-happen": "^11.1.1" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - 
"node_modules/tuf-js/node_modules/cacache": { - "version": "17.1.4", - "resolved": "https://registry.npmjs.org/cacache/-/cacache-17.1.4.tgz", - "integrity": "sha512-/aJwG2l3ZMJ1xNAnqbMpA40of9dj/pIH3QfiuQSqjfPJF747VR0J/bHn+/KdNnHKc6XQcWt/AfRSBft82W1d2A==", - "inBundle": true, - "dependencies": { - "@npmcli/fs": "^3.1.0", - "fs-minipass": "^3.0.0", - "glob": "^10.2.2", - "lru-cache": "^7.7.1", - "minipass": "^7.0.3", - "minipass-collect": "^1.0.2", - "minipass-flush": "^1.0.5", - "minipass-pipeline": "^1.2.4", - "p-map": "^4.0.0", - "ssri": "^10.0.0", - "tar": "^6.1.11", - "unique-filename": "^3.0.0" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/tuf-js/node_modules/cacache/node_modules/minipass": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.0.3.tgz", - "integrity": "sha512-LhbbwCfz3vsb12j/WkWQPZfKTsgqIe1Nf/ti1pKjYESGLHIVjWU96G9/ljLH4F9mWNVhlQOm0VySdAWzf05dpg==", - "inBundle": true, - "engines": { - "node": ">=16 || 14 >=14.17" - } - }, - "node_modules/tuf-js/node_modules/lru-cache": { - "version": "7.18.3", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz", - "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==", - "inBundle": true, - "engines": { - "node": ">=12" - } - }, - "node_modules/tuf-js/node_modules/make-fetch-happen": { - "version": "11.1.1", - "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-11.1.1.tgz", - "integrity": "sha512-rLWS7GCSTcEujjVBs2YqG7Y4643u8ucvCJeSRqiLYhesrDuzeuFIk37xREzAsfQaqzl8b9rNCE4m6J8tvX4Q8w==", - "inBundle": true, - "dependencies": { - "agentkeepalive": "^4.2.1", - "cacache": "^17.0.0", - "http-cache-semantics": "^4.1.1", - "http-proxy-agent": "^5.0.0", - "https-proxy-agent": "^5.0.0", - "is-lambda": "^1.0.1", - "lru-cache": "^7.7.1", - "minipass": "^5.0.0", - "minipass-fetch": "^3.0.0", - "minipass-flush": "^1.0.5", - "minipass-pipeline": "^1.2.4", - "negotiator": "^0.6.3", - "promise-retry": "^2.0.1", - "socks-proxy-agent": "^7.0.0", - "ssri": "^10.0.0" + "make-fetch-happen": "^13.0.0" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/tuf-js/node_modules/minipass": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", - "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==", - "inBundle": true, - "engines": { - "node": ">=8" + "node": "^16.14.0 || >=18.0.0" } }, "node_modules/tunnel": { @@ -16183,7 +16214,7 @@ "npm-registry-fetch": "^16.0.0", "proc-log": "^3.0.0", "semver": "^7.3.7", - "sigstore": "^1.4.0", + "sigstore": "^2.0.0", "ssri": "^10.0.5" }, "devDependencies": { @@ -16198,6 +16229,28 @@ "node": "^16.14.0 || >=18.0.0" } }, + "workspaces/libnpmpublish/node_modules/@sigstore/protobuf-specs": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/@sigstore/protobuf-specs/-/protobuf-specs-0.2.1.tgz", + "integrity": "sha512-XTWVxnWJu+c1oCshMLwnKvz8ZQJJDVOlciMfgpJBQbThVjKTCG8dwyhgLngBD2KN0ap9F/gOV8rFDEx8uh7R2A==", + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "workspaces/libnpmpublish/node_modules/sigstore": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/sigstore/-/sigstore-2.0.0.tgz", + "integrity": "sha512-RtTi90xIdzFmQAAKb9+Ki1nx4IR2Z5c+mFn3dN0xuPHgk3gTt3f7ZqKsZ9UFQP40ZAlm7un8LMyjhwgrTIXNPA==", + "dependencies": { + "@sigstore/bundle": "^2.0.0", + 
"@sigstore/protobuf-specs": "^0.2.1", + "@sigstore/sign": "^2.0.0", + "@sigstore/tuf": "^2.0.0" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, "workspaces/libnpmsearch": { "version": "6.0.2", "license": "ISC", diff --git a/package.json b/package.json index f03a8635c8c98..303fd1e42c853 100644 --- a/package.json +++ b/package.json @@ -59,6 +59,7 @@ "@npmcli/package-json": "^5.0.0", "@npmcli/promise-spawn": "^6.0.2", "@npmcli/run-script": "^6.0.2", + "@sigstore/tuf": "^2.0.0", "abbrev": "^2.0.0", "archy": "~1.0.0", "cacache": "^18.0.0", @@ -109,7 +110,6 @@ "qrcode-terminal": "^0.12.0", "read": "^2.1.0", "semver": "^7.5.4", - "sigstore": "^1.7.0", "ssri": "^10.0.5", "supports-color": "^9.4.0", "tar": "^6.1.15", @@ -129,6 +129,7 @@ "@npmcli/package-json", "@npmcli/promise-spawn", "@npmcli/run-script", + "@sigstore/tuf", "abbrev", "archy", "cacache", @@ -179,7 +180,6 @@ "qrcode-terminal", "read", "semver", - "sigstore", "ssri", "supports-color", "tar", @@ -197,7 +197,7 @@ "@npmcli/mock-globals": "^1.0.0", "@npmcli/mock-registry": "^1.0.0", "@npmcli/template-oss": "4.18.0", - "@tufjs/repo-mock": "^1.3.1", + "@tufjs/repo-mock": "^2.0.0", "diff": "^5.1.0", "licensee": "^10.0.0", "nock": "^13.3.3", diff --git a/test/lib/commands/audit.js b/test/lib/commands/audit.js index 4014e73387351..ae6d6c170842f 100644 --- a/test/lib/commands/audit.js +++ b/test/lib/commands/audit.js @@ -1699,16 +1699,12 @@ t.test('audit signatures', async t => { const { npm } = await loadMockNpm(t, { prefixDir: installWithMultipleDeps, mocks: { - sigstore: { - sigstore: { - tuf: { - client: async () => ({ - getTarget: async () => { - throw new Error('error refreshing TUF metadata') - }, - }), + '@sigstore/tuf': { + initTUF: async () => ({ + getTarget: async () => { + throw new Error('error refreshing TUF metadata') }, - }, + }), }, }, }) diff --git a/workspaces/libnpmpublish/lib/provenance.js b/workspaces/libnpmpublish/lib/provenance.js index 398db1b4cd467..45fe963d5f36f 100644 --- a/workspaces/libnpmpublish/lib/provenance.js +++ b/workspaces/libnpmpublish/lib/provenance.js @@ -1,4 +1,4 @@ -const { sigstore } = require('sigstore') +const sigstore = require('sigstore') const { readFile } = require('fs/promises') const ci = require('ci-info') const { env } = process diff --git a/workspaces/libnpmpublish/package.json b/workspaces/libnpmpublish/package.json index 7dd2809873e4f..094414e716102 100644 --- a/workspaces/libnpmpublish/package.json +++ b/workspaces/libnpmpublish/package.json @@ -44,7 +44,7 @@ "npm-registry-fetch": "^16.0.0", "proc-log": "^3.0.0", "semver": "^7.3.7", - "sigstore": "^1.4.0", + "sigstore": "^2.0.0", "ssri": "^10.0.5" }, "engines": { diff --git a/workspaces/libnpmpublish/test/publish.js b/workspaces/libnpmpublish/test/publish.js index f7e824a3805c8..4a745bab8a78b 100644 --- a/workspaces/libnpmpublish/test/publish.js +++ b/workspaces/libnpmpublish/test/publish.js @@ -725,7 +725,7 @@ t.test('automatic provenance with incorrect permissions', async t => { t.test('user-supplied provenance - success', async t => { const { publish } = t.mock('..', { '../lib/provenance': t.mock('../lib/provenance', { - sigstore: { sigstore: { verify: () => {} } }, + sigstore: { verify: () => {} }, }), })