summaryrefslogtreecommitdiff
path: root/deps/npm/node_modules/@sigstore/protobuf-specs
diff options
context:
space:
mode:
Diffstat (limited to 'deps/npm/node_modules/@sigstore/protobuf-specs')
-rw-r--r--deps/npm/node_modules/@sigstore/protobuf-specs/LICENSE202
-rw-r--r--deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.d.ts46
-rw-r--r--deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js89
-rw-r--r--deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.d.ts52
-rw-r--r--deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js119
-rw-r--r--deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.d.ts939
-rw-r--r--deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js1308
-rw-r--r--deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.d.ts110
-rw-r--r--deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js24
-rw-r--r--deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.d.ts72
-rw-r--r--deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js106
-rw-r--r--deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.d.ts228
-rw-r--r--deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js457
-rw-r--r--deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.d.ts136
-rw-r--r--deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js167
-rw-r--r--deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.d.ts89
-rw-r--r--deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js103
-rw-r--r--deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.d.ts156
-rw-r--r--deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js273
-rw-r--r--deps/npm/node_modules/@sigstore/protobuf-specs/dist/index.d.ts6
-rw-r--r--deps/npm/node_modules/@sigstore/protobuf-specs/dist/index.js37
-rw-r--r--deps/npm/node_modules/@sigstore/protobuf-specs/package.json31
22 files changed, 4750 insertions, 0 deletions
diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/LICENSE b/deps/npm/node_modules/@sigstore/protobuf-specs/LICENSE
new file mode 100644
index 0000000000..e9e7c1679a
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/protobuf-specs/LICENSE
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright 2023 The Sigstore Authors
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.d.ts b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.d.ts
new file mode 100644
index 0000000000..81422a0075
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.d.ts
@@ -0,0 +1,46 @@
+/// <reference types="node" />
+/** An authenticated message of arbitrary type. */
+export interface Envelope {
+ /**
+ * Message to be signed. (In JSON, this is encoded as base64.)
+ * REQUIRED.
+ */
+ payload: Buffer;
+ /**
+ * String unambiguously identifying how to interpret payload.
+ * REQUIRED.
+ */
+ payloadType: string;
+ /**
+ * Signature over:
+ * PAE(type, body)
+ * Where PAE is defined as:
+ * PAE(type, body) = "DSSEv1" + SP + LEN(type) + SP + type + SP + LEN(body) + SP + body
+ * + = concatenation
+ * SP = ASCII space [0x20]
+ * "DSSEv1" = ASCII [0x44, 0x53, 0x53, 0x45, 0x76, 0x31]
+ * LEN(s) = ASCII decimal encoding of the byte length of s, with no leading zeros
+ * REQUIRED (length >= 1).
+ */
+ signatures: Signature[];
+}
+export interface Signature {
+ /**
+ * Signature itself. (In JSON, this is encoded as base64.)
+ * REQUIRED.
+ */
+ sig: Buffer;
+ /**
+ * Unauthenticated* hint identifying which public key was used.
+ * OPTIONAL.
+ */
+ keyid: string;
+}
+export declare const Envelope: {
+ fromJSON(object: any): Envelope;
+ toJSON(message: Envelope): unknown;
+};
+export declare const Signature: {
+ fromJSON(object: any): Signature;
+ toJSON(message: Signature): unknown;
+};
diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
new file mode 100644
index 0000000000..715bb1aa5b
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
@@ -0,0 +1,89 @@
+"use strict";
+/* eslint-disable */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Signature = exports.Envelope = void 0;
+function createBaseEnvelope() {
+ return { payload: Buffer.alloc(0), payloadType: "", signatures: [] };
+}
+exports.Envelope = {
+ fromJSON(object) {
+ return {
+ payload: isSet(object.payload) ? Buffer.from(bytesFromBase64(object.payload)) : Buffer.alloc(0),
+ payloadType: isSet(object.payloadType) ? String(object.payloadType) : "",
+ signatures: Array.isArray(object?.signatures) ? object.signatures.map((e) => exports.Signature.fromJSON(e)) : [],
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.payload !== undefined &&
+ (obj.payload = base64FromBytes(message.payload !== undefined ? message.payload : Buffer.alloc(0)));
+ message.payloadType !== undefined && (obj.payloadType = message.payloadType);
+ if (message.signatures) {
+ obj.signatures = message.signatures.map((e) => e ? exports.Signature.toJSON(e) : undefined);
+ }
+ else {
+ obj.signatures = [];
+ }
+ return obj;
+ },
+};
+function createBaseSignature() {
+ return { sig: Buffer.alloc(0), keyid: "" };
+}
+exports.Signature = {
+ fromJSON(object) {
+ return {
+ sig: isSet(object.sig) ? Buffer.from(bytesFromBase64(object.sig)) : Buffer.alloc(0),
+ keyid: isSet(object.keyid) ? String(object.keyid) : "",
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.sig !== undefined && (obj.sig = base64FromBytes(message.sig !== undefined ? message.sig : Buffer.alloc(0)));
+ message.keyid !== undefined && (obj.keyid = message.keyid);
+ return obj;
+ },
+};
+var globalThis = (() => {
+ if (typeof globalThis !== "undefined") {
+ return globalThis;
+ }
+ if (typeof self !== "undefined") {
+ return self;
+ }
+ if (typeof window !== "undefined") {
+ return window;
+ }
+ if (typeof global !== "undefined") {
+ return global;
+ }
+ throw "Unable to locate global object";
+})();
+function bytesFromBase64(b64) {
+ if (globalThis.Buffer) {
+ return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
+ }
+ else {
+ const bin = globalThis.atob(b64);
+ const arr = new Uint8Array(bin.length);
+ for (let i = 0; i < bin.length; ++i) {
+ arr[i] = bin.charCodeAt(i);
+ }
+ return arr;
+ }
+}
+function base64FromBytes(arr) {
+ if (globalThis.Buffer) {
+ return globalThis.Buffer.from(arr).toString("base64");
+ }
+ else {
+ const bin = [];
+ arr.forEach((byte) => {
+ bin.push(String.fromCharCode(byte));
+ });
+ return globalThis.btoa(bin.join(""));
+ }
+}
+function isSet(value) {
+ return value !== null && value !== undefined;
+}
diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.d.ts b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.d.ts
new file mode 100644
index 0000000000..1b4ed47aad
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.d.ts
@@ -0,0 +1,52 @@
+/**
+ * An indicator of the behavior of a given field (for example, that a field
+ * is required in requests, or given as output but ignored as input).
+ * This **does not** change the behavior in protocol buffers itself; it only
+ * denotes the behavior and may affect how API tooling handles the field.
+ *
+ * Note: This enum **may** receive new values in the future.
+ */
+export declare enum FieldBehavior {
+ /** FIELD_BEHAVIOR_UNSPECIFIED - Conventional default for enums. Do not use this. */
+ FIELD_BEHAVIOR_UNSPECIFIED = 0,
+ /**
+ * OPTIONAL - Specifically denotes a field as optional.
+ * While all fields in protocol buffers are optional, this may be specified
+ * for emphasis if appropriate.
+ */
+ OPTIONAL = 1,
+ /**
+ * REQUIRED - Denotes a field as required.
+ * This indicates that the field **must** be provided as part of the request,
+ * and failure to do so will cause an error (usually `INVALID_ARGUMENT`).
+ */
+ REQUIRED = 2,
+ /**
+ * OUTPUT_ONLY - Denotes a field as output only.
+ * This indicates that the field is provided in responses, but including the
+ * field in a request does nothing (the server *must* ignore it and
+ * *must not* throw an error as a result of the field's presence).
+ */
+ OUTPUT_ONLY = 3,
+ /**
+ * INPUT_ONLY - Denotes a field as input only.
+ * This indicates that the field is provided in requests, and the
+ * corresponding field is not included in output.
+ */
+ INPUT_ONLY = 4,
+ /**
+ * IMMUTABLE - Denotes a field as immutable.
+ * This indicates that the field may be set once in a request to create a
+ * resource, but may not be changed thereafter.
+ */
+ IMMUTABLE = 5,
+ /**
+ * UNORDERED_LIST - Denotes that a (repeated) field is an unordered list.
+ * This indicates that the service may provide the elements of the list
+ * in any arbitrary order, rather than the order the user originally
+ * provided. Additionally, the list's order may or may not be stable.
+ */
+ UNORDERED_LIST = 6
+}
+export declare function fieldBehaviorFromJSON(object: any): FieldBehavior;
+export declare function fieldBehaviorToJSON(object: FieldBehavior): string;
diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
new file mode 100644
index 0000000000..f9b57cccdc
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
@@ -0,0 +1,119 @@
+"use strict";
+/* eslint-disable */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.fieldBehaviorToJSON = exports.fieldBehaviorFromJSON = exports.FieldBehavior = void 0;
+/**
+ * An indicator of the behavior of a given field (for example, that a field
+ * is required in requests, or given as output but ignored as input).
+ * This **does not** change the behavior in protocol buffers itself; it only
+ * denotes the behavior and may affect how API tooling handles the field.
+ *
+ * Note: This enum **may** receive new values in the future.
+ */
+var FieldBehavior;
+(function (FieldBehavior) {
+ /** FIELD_BEHAVIOR_UNSPECIFIED - Conventional default for enums. Do not use this. */
+ FieldBehavior[FieldBehavior["FIELD_BEHAVIOR_UNSPECIFIED"] = 0] = "FIELD_BEHAVIOR_UNSPECIFIED";
+ /**
+ * OPTIONAL - Specifically denotes a field as optional.
+ * While all fields in protocol buffers are optional, this may be specified
+ * for emphasis if appropriate.
+ */
+ FieldBehavior[FieldBehavior["OPTIONAL"] = 1] = "OPTIONAL";
+ /**
+ * REQUIRED - Denotes a field as required.
+ * This indicates that the field **must** be provided as part of the request,
+ * and failure to do so will cause an error (usually `INVALID_ARGUMENT`).
+ */
+ FieldBehavior[FieldBehavior["REQUIRED"] = 2] = "REQUIRED";
+ /**
+ * OUTPUT_ONLY - Denotes a field as output only.
+ * This indicates that the field is provided in responses, but including the
+ * field in a request does nothing (the server *must* ignore it and
+ * *must not* throw an error as a result of the field's presence).
+ */
+ FieldBehavior[FieldBehavior["OUTPUT_ONLY"] = 3] = "OUTPUT_ONLY";
+ /**
+ * INPUT_ONLY - Denotes a field as input only.
+ * This indicates that the field is provided in requests, and the
+ * corresponding field is not included in output.
+ */
+ FieldBehavior[FieldBehavior["INPUT_ONLY"] = 4] = "INPUT_ONLY";
+ /**
+ * IMMUTABLE - Denotes a field as immutable.
+ * This indicates that the field may be set once in a request to create a
+ * resource, but may not be changed thereafter.
+ */
+ FieldBehavior[FieldBehavior["IMMUTABLE"] = 5] = "IMMUTABLE";
+ /**
+ * UNORDERED_LIST - Denotes that a (repeated) field is an unordered list.
+ * This indicates that the service may provide the elements of the list
+ * in any arbitrary order, rather than the order the user originally
+ * provided. Additionally, the list's order may or may not be stable.
+ */
+ FieldBehavior[FieldBehavior["UNORDERED_LIST"] = 6] = "UNORDERED_LIST";
+})(FieldBehavior = exports.FieldBehavior || (exports.FieldBehavior = {}));
+function fieldBehaviorFromJSON(object) {
+ switch (object) {
+ case 0:
+ case "FIELD_BEHAVIOR_UNSPECIFIED":
+ return FieldBehavior.FIELD_BEHAVIOR_UNSPECIFIED;
+ case 1:
+ case "OPTIONAL":
+ return FieldBehavior.OPTIONAL;
+ case 2:
+ case "REQUIRED":
+ return FieldBehavior.REQUIRED;
+ case 3:
+ case "OUTPUT_ONLY":
+ return FieldBehavior.OUTPUT_ONLY;
+ case 4:
+ case "INPUT_ONLY":
+ return FieldBehavior.INPUT_ONLY;
+ case 5:
+ case "IMMUTABLE":
+ return FieldBehavior.IMMUTABLE;
+ case 6:
+ case "UNORDERED_LIST":
+ return FieldBehavior.UNORDERED_LIST;
+ default:
+ throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior");
+ }
+}
+exports.fieldBehaviorFromJSON = fieldBehaviorFromJSON;
+function fieldBehaviorToJSON(object) {
+ switch (object) {
+ case FieldBehavior.FIELD_BEHAVIOR_UNSPECIFIED:
+ return "FIELD_BEHAVIOR_UNSPECIFIED";
+ case FieldBehavior.OPTIONAL:
+ return "OPTIONAL";
+ case FieldBehavior.REQUIRED:
+ return "REQUIRED";
+ case FieldBehavior.OUTPUT_ONLY:
+ return "OUTPUT_ONLY";
+ case FieldBehavior.INPUT_ONLY:
+ return "INPUT_ONLY";
+ case FieldBehavior.IMMUTABLE:
+ return "IMMUTABLE";
+ case FieldBehavior.UNORDERED_LIST:
+ return "UNORDERED_LIST";
+ default:
+ throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior");
+ }
+}
+exports.fieldBehaviorToJSON = fieldBehaviorToJSON;
+var globalThis = (() => {
+ if (typeof globalThis !== "undefined") {
+ return globalThis;
+ }
+ if (typeof self !== "undefined") {
+ return self;
+ }
+ if (typeof window !== "undefined") {
+ return window;
+ }
+ if (typeof global !== "undefined") {
+ return global;
+ }
+ throw "Unable to locate global object";
+})();
diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.d.ts b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.d.ts
new file mode 100644
index 0000000000..ef43bf01c1
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.d.ts
@@ -0,0 +1,939 @@
+/// <reference types="node" />
+/**
+ * The protocol compiler can output a FileDescriptorSet containing the .proto
+ * files it parses.
+ */
+export interface FileDescriptorSet {
+ file: FileDescriptorProto[];
+}
+/** Describes a complete .proto file. */
+export interface FileDescriptorProto {
+ /** file name, relative to root of source tree */
+ name: string;
+ /** e.g. "foo", "foo.bar", etc. */
+ package: string;
+ /** Names of files imported by this file. */
+ dependency: string[];
+ /** Indexes of the public imported files in the dependency list above. */
+ publicDependency: number[];
+ /**
+ * Indexes of the weak imported files in the dependency list.
+ * For Google-internal migration only. Do not use.
+ */
+ weakDependency: number[];
+ /** All top-level definitions in this file. */
+ messageType: DescriptorProto[];
+ enumType: EnumDescriptorProto[];
+ service: ServiceDescriptorProto[];
+ extension: FieldDescriptorProto[];
+ options: FileOptions | undefined;
+ /**
+ * This field contains optional information about the original source code.
+ * You may safely remove this entire field without harming runtime
+ * functionality of the descriptors -- the information is needed only by
+ * development tools.
+ */
+ sourceCodeInfo: SourceCodeInfo | undefined;
+ /**
+ * The syntax of the proto file.
+ * The supported values are "proto2" and "proto3".
+ */
+ syntax: string;
+}
+/** Describes a message type. */
+export interface DescriptorProto {
+ name: string;
+ field: FieldDescriptorProto[];
+ extension: FieldDescriptorProto[];
+ nestedType: DescriptorProto[];
+ enumType: EnumDescriptorProto[];
+ extensionRange: DescriptorProto_ExtensionRange[];
+ oneofDecl: OneofDescriptorProto[];
+ options: MessageOptions | undefined;
+ reservedRange: DescriptorProto_ReservedRange[];
+ /**
+ * Reserved field names, which may not be used by fields in the same message.
+ * A given name may only be reserved once.
+ */
+ reservedName: string[];
+}
+export interface DescriptorProto_ExtensionRange {
+ /** Inclusive. */
+ start: number;
+ /** Exclusive. */
+ end: number;
+ options: ExtensionRangeOptions | undefined;
+}
+/**
+ * Range of reserved tag numbers. Reserved tag numbers may not be used by
+ * fields or extension ranges in the same message. Reserved ranges may
+ * not overlap.
+ */
+export interface DescriptorProto_ReservedRange {
+ /** Inclusive. */
+ start: number;
+ /** Exclusive. */
+ end: number;
+}
+export interface ExtensionRangeOptions {
+ /** The parser stores options it doesn't recognize here. See above. */
+ uninterpretedOption: UninterpretedOption[];
+}
+/** Describes a field within a message. */
+export interface FieldDescriptorProto {
+ name: string;
+ number: number;
+ label: FieldDescriptorProto_Label;
+ /**
+ * If type_name is set, this need not be set. If both this and type_name
+ * are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP.
+ */
+ type: FieldDescriptorProto_Type;
+ /**
+ * For message and enum types, this is the name of the type. If the name
+ * starts with a '.', it is fully-qualified. Otherwise, C++-like scoping
+ * rules are used to find the type (i.e. first the nested types within this
+ * message are searched, then within the parent, on up to the root
+ * namespace).
+ */
+ typeName: string;
+ /**
+ * For extensions, this is the name of the type being extended. It is
+ * resolved in the same manner as type_name.
+ */
+ extendee: string;
+ /**
+ * For numeric types, contains the original text representation of the value.
+ * For booleans, "true" or "false".
+ * For strings, contains the default text contents (not escaped in any way).
+ * For bytes, contains the C escaped value. All bytes >= 128 are escaped.
+ */
+ defaultValue: string;
+ /**
+ * If set, gives the index of a oneof in the containing type's oneof_decl
+ * list. This field is a member of that oneof.
+ */
+ oneofIndex: number;
+ /**
+ * JSON name of this field. The value is set by protocol compiler. If the
+ * user has set a "json_name" option on this field, that option's value
+ * will be used. Otherwise, it's deduced from the field's name by converting
+ * it to camelCase.
+ */
+ jsonName: string;
+ options: FieldOptions | undefined;
+ /**
+ * If true, this is a proto3 "optional". When a proto3 field is optional, it
+ * tracks presence regardless of field type.
+ *
+ * When proto3_optional is true, this field must be belong to a oneof to
+ * signal to old proto3 clients that presence is tracked for this field. This
+ * oneof is known as a "synthetic" oneof, and this field must be its sole
+ * member (each proto3 optional field gets its own synthetic oneof). Synthetic
+ * oneofs exist in the descriptor only, and do not generate any API. Synthetic
+ * oneofs must be ordered after all "real" oneofs.
+ *
+ * For message fields, proto3_optional doesn't create any semantic change,
+ * since non-repeated message fields always track presence. However it still
+ * indicates the semantic detail of whether the user wrote "optional" or not.
+ * This can be useful for round-tripping the .proto file. For consistency we
+ * give message fields a synthetic oneof also, even though it is not required
+ * to track presence. This is especially important because the parser can't
+ * tell if a field is a message or an enum, so it must always create a
+ * synthetic oneof.
+ *
+ * Proto2 optional fields do not set this flag, because they already indicate
+ * optional with `LABEL_OPTIONAL`.
+ */
+ proto3Optional: boolean;
+}
+/**
+ * Field type discriminator for FieldDescriptorProto.type. Values mirror
+ * google/protobuf/descriptor.proto; 0 is intentionally unused.
+ */
+export declare enum FieldDescriptorProto_Type {
+    /**
+     * TYPE_DOUBLE - 0 is reserved for errors.
+     * Order is weird for historical reasons.
+     */
+    TYPE_DOUBLE = 1,
+    TYPE_FLOAT = 2,
+    /**
+     * TYPE_INT64 - Not ZigZag encoded.  Negative numbers take 10 bytes.  Use TYPE_SINT64 if
+     * negative values are likely.
+     */
+    TYPE_INT64 = 3,
+    TYPE_UINT64 = 4,
+    /**
+     * TYPE_INT32 - Not ZigZag encoded.  Negative numbers take 10 bytes.  Use TYPE_SINT32 if
+     * negative values are likely.
+     */
+    TYPE_INT32 = 5,
+    TYPE_FIXED64 = 6,
+    TYPE_FIXED32 = 7,
+    TYPE_BOOL = 8,
+    TYPE_STRING = 9,
+    /**
+     * TYPE_GROUP - Tag-delimited aggregate.
+     * Group type is deprecated and not supported in proto3. However, Proto3
+     * implementations should still be able to parse the group wire format and
+     * treat group fields as unknown fields.
+     */
+    TYPE_GROUP = 10,
+    /** TYPE_MESSAGE - Length-delimited aggregate. */
+    TYPE_MESSAGE = 11,
+    /** TYPE_BYTES - New in version 2. */
+    TYPE_BYTES = 12,
+    TYPE_UINT32 = 13,
+    TYPE_ENUM = 14,
+    TYPE_SFIXED32 = 15,
+    TYPE_SFIXED64 = 16,
+    /** TYPE_SINT32 - Uses ZigZag encoding. */
+    TYPE_SINT32 = 17,
+    /** TYPE_SINT64 - Uses ZigZag encoding. */
+    TYPE_SINT64 = 18
+}
+/** Parses the JSON form (enum name string or numeric value) into a FieldDescriptorProto_Type. */
+export declare function fieldDescriptorProto_TypeFromJSON(object: any): FieldDescriptorProto_Type;
+/** Converts a FieldDescriptorProto_Type to its JSON string representation. */
+export declare function fieldDescriptorProto_TypeToJSON(object: FieldDescriptorProto_Type): string;
+/** Cardinality label for FieldDescriptorProto.label; mirrors descriptor.proto. */
+export declare enum FieldDescriptorProto_Label {
+    /** LABEL_OPTIONAL - 0 is reserved for errors */
+    LABEL_OPTIONAL = 1,
+    LABEL_REQUIRED = 2,
+    LABEL_REPEATED = 3
+}
+/** Parses the JSON form (enum name string or numeric value) into a FieldDescriptorProto_Label. */
+export declare function fieldDescriptorProto_LabelFromJSON(object: any): FieldDescriptorProto_Label;
+/** Converts a FieldDescriptorProto_Label to its JSON string representation. */
+export declare function fieldDescriptorProto_LabelToJSON(object: FieldDescriptorProto_Label): string;
+/** Describes a oneof. */
+export interface OneofDescriptorProto {
+    /** Name of the oneof, as declared in the .proto file. */
+    name: string;
+    options: OneofOptions | undefined;
+}
+/** Describes an enum type. */
+export interface EnumDescriptorProto {
+    name: string;
+    value: EnumValueDescriptorProto[];
+    options: EnumOptions | undefined;
+    /**
+     * Range of reserved numeric values. Reserved numeric values may not be used
+     * by enum values in the same enum declaration. Reserved ranges may not
+     * overlap.
+     */
+    reservedRange: EnumDescriptorProto_EnumReservedRange[];
+    /**
+     * Reserved enum value names, which may not be reused. A given name may only
+     * be reserved once.
+     */
+    reservedName: string[];
+}
+/**
+ * Range of reserved numeric values. Reserved values may not be used by
+ * entries in the same enum. Reserved ranges may not overlap.
+ *
+ * Note that this is distinct from DescriptorProto.ReservedRange in that it
+ * is inclusive such that it can appropriately represent the entire int32
+ * domain.
+ */
+export interface EnumDescriptorProto_EnumReservedRange {
+    /** Inclusive. */
+    start: number;
+    /** Inclusive. */
+    end: number;
+}
+/** Describes a value within an enum. */
+export interface EnumValueDescriptorProto {
+    name: string;
+    /** Numeric value assigned to this enum entry. */
+    number: number;
+    options: EnumValueOptions | undefined;
+}
+/** Describes a service. */
+export interface ServiceDescriptorProto {
+    name: string;
+    /** The RPC methods exposed by this service. */
+    method: MethodDescriptorProto[];
+    options: ServiceOptions | undefined;
+}
+/** Describes a method of a service. */
+export interface MethodDescriptorProto {
+    name: string;
+    /**
+     * Input and output type names.  These are resolved in the same way as
+     * FieldDescriptorProto.type_name, but must refer to a message type.
+     */
+    inputType: string;
+    outputType: string;
+    options: MethodOptions | undefined;
+    /** Identifies if client streams multiple client messages */
+    clientStreaming: boolean;
+    /** Identifies if server streams multiple server messages */
+    serverStreaming: boolean;
+}
+export interface FileOptions {
+    /**
+     * Sets the Java package where classes generated from this .proto will be
+     * placed.  By default, the proto package is used, but this is often
+     * inappropriate because proto packages do not normally start with backwards
+     * domain names.
+     */
+    javaPackage: string;
+    /**
+     * Controls the name of the wrapper Java class generated for the .proto file.
+     * That class will always contain the .proto file's getDescriptor() method as
+     * well as any top-level extensions defined in the .proto file.
+     * If java_multiple_files is disabled, then all the other classes from the
+     * .proto file will be nested inside the single wrapper outer class.
+     */
+    javaOuterClassname: string;
+    /**
+     * If enabled, then the Java code generator will generate a separate .java
+     * file for each top-level message, enum, and service defined in the .proto
+     * file.  Thus, these types will *not* be nested inside the wrapper class
+     * named by java_outer_classname.  However, the wrapper class will still be
+     * generated to contain the file's getDescriptor() method as well as any
+     * top-level extensions defined in the file.
+     */
+    javaMultipleFiles: boolean;
+    /**
+     * This option does nothing.
+     *
+     * @deprecated
+     */
+    javaGenerateEqualsAndHash: boolean;
+    /**
+     * If set true, then the Java2 code generator will generate code that
+     * throws an exception whenever an attempt is made to assign a non-UTF-8
+     * byte sequence to a string field.
+     * Message reflection will do the same.
+     * However, an extension field still accepts non-UTF-8 byte sequences.
+     * This option has no effect on when used with the lite runtime.
+     */
+    javaStringCheckUtf8: boolean;
+    /** Code-generation optimization mode; see FileOptions_OptimizeMode. */
+    optimizeFor: FileOptions_OptimizeMode;
+    /**
+     * Sets the Go package where structs generated from this .proto will be
+     * placed. If omitted, the Go package will be derived from the following:
+     *   - The basename of the package import path, if provided.
+     *   - Otherwise, the package statement in the .proto file, if present.
+     *   - Otherwise, the basename of the .proto file, without extension.
+     */
+    goPackage: string;
+    /**
+     * Should generic services be generated in each language?  "Generic" services
+     * are not specific to any particular RPC system.  They are generated by the
+     * main code generators in each language (without additional plugins).
+     * Generic services were the only kind of service generation supported by
+     * early versions of google.protobuf.
+     *
+     * Generic services are now considered deprecated in favor of using plugins
+     * that generate code specific to your particular RPC system.  Therefore,
+     * these default to false.  Old code which depends on generic services should
+     * explicitly set them to true.
+     */
+    ccGenericServices: boolean;
+    javaGenericServices: boolean;
+    pyGenericServices: boolean;
+    phpGenericServices: boolean;
+    /**
+     * Is this file deprecated?
+     * Depending on the target platform, this can emit Deprecated annotations
+     * for everything in the file, or it will be completely ignored; in the very
+     * least, this is a formalization for deprecating files.
+     */
+    deprecated: boolean;
+    /**
+     * Enables the use of arenas for the proto messages in this file. This applies
+     * only to generated classes for C++.
+     */
+    ccEnableArenas: boolean;
+    /**
+     * Sets the objective c class prefix which is prepended to all objective c
+     * generated classes from this .proto. There is no default.
+     */
+    objcClassPrefix: string;
+    /** Namespace for generated classes; defaults to the package. */
+    csharpNamespace: string;
+    /**
+     * By default Swift generators will take the proto package and CamelCase it
+     * replacing '.' with underscore and use that to prefix the types/symbols
+     * defined. When this options is provided, they will use this value instead
+     * to prefix the types/symbols defined.
+     */
+    swiftPrefix: string;
+    /**
+     * Sets the php class prefix which is prepended to all php generated classes
+     * from this .proto. Default is empty.
+     */
+    phpClassPrefix: string;
+    /**
+     * Use this option to change the namespace of php generated classes. Default
+     * is empty. When this option is empty, the package name will be used for
+     * determining the namespace.
+     */
+    phpNamespace: string;
+    /**
+     * Use this option to change the namespace of php generated metadata classes.
+     * Default is empty. When this option is empty, the proto file name will be
+     * used for determining the namespace.
+     */
+    phpMetadataNamespace: string;
+    /**
+     * Use this option to change the package of ruby generated classes. Default
+     * is empty. When this option is not set, the package name will be used for
+     * determining the ruby package.
+     */
+    rubyPackage: string;
+    /**
+     * The parser stores options it doesn't recognize here.
+     * See the documentation for the "Options" section above.
+     */
+    uninterpretedOption: UninterpretedOption[];
+}
+/** Generated classes can be optimized for speed or code size. */
+export declare enum FileOptions_OptimizeMode {
+    /** SPEED - Generate complete code for parsing, serialization, */
+    SPEED = 1,
+    /** CODE_SIZE - etc. */
+    CODE_SIZE = 2,
+    /** LITE_RUNTIME - Generate code using MessageLite and the lite runtime. */
+    LITE_RUNTIME = 3
+}
+/** Parses the JSON form (enum name string or numeric value) into a FileOptions_OptimizeMode. */
+export declare function fileOptions_OptimizeModeFromJSON(object: any): FileOptions_OptimizeMode;
+/** Converts a FileOptions_OptimizeMode to its JSON string representation. */
+export declare function fileOptions_OptimizeModeToJSON(object: FileOptions_OptimizeMode): string;
+export interface MessageOptions {
+    /**
+     * Set true to use the old proto1 MessageSet wire format for extensions.
+     * This is provided for backwards-compatibility with the MessageSet wire
+     * format.  You should not use this for any other reason:  It's less
+     * efficient, has fewer features, and is more complicated.
+     *
+     * The message must be defined exactly as follows:
+     *   message Foo {
+     *     option message_set_wire_format = true;
+     *     extensions 4 to max;
+     *   }
+     * Note that the message cannot have any defined fields; MessageSets only
+     * have extensions.
+     *
+     * All extensions of your type must be singular messages; e.g. they cannot
+     * be int32s, enums, or repeated messages.
+     *
+     * Because this is an option, the above two restrictions are not enforced by
+     * the protocol compiler.
+     */
+    messageSetWireFormat: boolean;
+    /**
+     * Disables the generation of the standard "descriptor()" accessor, which can
+     * conflict with a field of the same name.  This is meant to make migration
+     * from proto1 easier; new code should avoid fields named "descriptor".
+     */
+    noStandardDescriptorAccessor: boolean;
+    /**
+     * Is this message deprecated?
+     * Depending on the target platform, this can emit Deprecated annotations
+     * for the message, or it will be completely ignored; in the very least,
+     * this is a formalization for deprecating messages.
+     */
+    deprecated: boolean;
+    /**
+     * Whether the message is an automatically generated map entry type for the
+     * maps field.
+     *
+     * For maps fields:
+     *     map<KeyType, ValueType> map_field = 1;
+     * The parsed descriptor looks like:
+     *     message MapFieldEntry {
+     *         option map_entry = true;
+     *         optional KeyType key = 1;
+     *         optional ValueType value = 2;
+     *     }
+     *     repeated MapFieldEntry map_field = 1;
+     *
+     * Implementations may choose not to generate the map_entry=true message, but
+     * use a native map in the target language to hold the keys and values.
+     * The reflection APIs in such implementations still need to work as
+     * if the field is a repeated message field.
+     *
+     * NOTE: Do not set the option in .proto files. Always use the maps syntax
+     * instead. The option should only be implicitly set by the proto compiler
+     * parser.
+     */
+    mapEntry: boolean;
+    /** The parser stores options it doesn't recognize here. See above. */
+    uninterpretedOption: UninterpretedOption[];
+}
+export interface FieldOptions {
+    /**
+     * The ctype option instructs the C++ code generator to use a different
+     * representation of the field than it normally would.  See the specific
+     * options below.  This option is not yet implemented in the open source
+     * release -- sorry, we'll try to include it in a future version!
+     */
+    ctype: FieldOptions_CType;
+    /**
+     * The packed option can be enabled for repeated primitive fields to enable
+     * a more efficient representation on the wire. Rather than repeatedly
+     * writing the tag and type for each element, the entire array is encoded as
+     * a single length-delimited blob. In proto3, only explicit setting it to
+     * false will avoid using packed encoding.
+     */
+    packed: boolean;
+    /**
+     * The jstype option determines the JavaScript type used for values of the
+     * field.  The option is permitted only for 64 bit integral and fixed types
+     * (int64, uint64, sint64, fixed64, sfixed64).  A field with jstype JS_STRING
+     * is represented as JavaScript string, which avoids loss of precision that
+     * can happen when a large value is converted to a floating point JavaScript.
+     * Specifying JS_NUMBER for the jstype causes the generated JavaScript code to
+     * use the JavaScript "number" type.  The behavior of the default option
+     * JS_NORMAL is implementation dependent.
+     *
+     * This option is an enum to permit additional types to be added, e.g.
+     * goog.math.Integer.
+     */
+    jstype: FieldOptions_JSType;
+    /**
+     * Should this field be parsed lazily?  Lazy applies only to message-type
+     * fields.  It means that when the outer message is initially parsed, the
+     * inner message's contents will not be parsed but instead stored in encoded
+     * form.  The inner message will actually be parsed when it is first accessed.
+     *
+     * This is only a hint.  Implementations are free to choose whether to use
+     * eager or lazy parsing regardless of the value of this option.  However,
+     * setting this option true suggests that the protocol author believes that
+     * using lazy parsing on this field is worth the additional bookkeeping
+     * overhead typically needed to implement it.
+     *
+     * This option does not affect the public interface of any generated code;
+     * all method signatures remain the same.  Furthermore, thread-safety of the
+     * interface is not affected by this option; const methods remain safe to
+     * call from multiple threads concurrently, while non-const methods continue
+     * to require exclusive access.
+     *
+     * Note that implementations may choose not to check required fields within
+     * a lazy sub-message.  That is, calling IsInitialized() on the outer message
+     * may return true even if the inner message has missing required fields.
+     * This is necessary because otherwise the inner message would have to be
+     * parsed in order to perform the check, defeating the purpose of lazy
+     * parsing.  An implementation which chooses not to check required fields
+     * must be consistent about it.  That is, for any particular sub-message, the
+     * implementation must either *always* check its required fields, or *never*
+     * check its required fields, regardless of whether or not the message has
+     * been parsed.
+     *
+     * As of 2021, lazy does no correctness checks on the byte stream during
+     * parsing.  This may lead to crashes if and when an invalid byte stream is
+     * finally parsed upon access.
+     *
+     * TODO(b/211906113):  Enable validation on lazy fields.
+     */
+    lazy: boolean;
+    /**
+     * unverified_lazy does no correctness checks on the byte stream. This should
+     * only be used where lazy with verification is prohibitive for performance
+     * reasons.
+     */
+    unverifiedLazy: boolean;
+    /**
+     * Is this field deprecated?
+     * Depending on the target platform, this can emit Deprecated annotations
+     * for accessors, or it will be completely ignored; in the very least, this
+     * is a formalization for deprecating fields.
+     */
+    deprecated: boolean;
+    /** For Google-internal migration only. Do not use. */
+    weak: boolean;
+    /** The parser stores options it doesn't recognize here. See above. */
+    uninterpretedOption: UninterpretedOption[];
+}
+export declare enum FieldOptions_CType {
+    /** STRING - Default mode. */
+    STRING = 0,
+    CORD = 1,
+    STRING_PIECE = 2
+}
+/** Parses the JSON form (enum name string or numeric value) into a FieldOptions_CType. */
+export declare function fieldOptions_CTypeFromJSON(object: any): FieldOptions_CType;
+/** Converts a FieldOptions_CType to its JSON string representation. */
+export declare function fieldOptions_CTypeToJSON(object: FieldOptions_CType): string;
+export declare enum FieldOptions_JSType {
+    /** JS_NORMAL - Use the default type. */
+    JS_NORMAL = 0,
+    /** JS_STRING - Use JavaScript strings. */
+    JS_STRING = 1,
+    /** JS_NUMBER - Use JavaScript numbers. */
+    JS_NUMBER = 2
+}
+/** Parses the JSON form (enum name string or numeric value) into a FieldOptions_JSType. */
+export declare function fieldOptions_JSTypeFromJSON(object: any): FieldOptions_JSType;
+/** Converts a FieldOptions_JSType to its JSON string representation. */
+export declare function fieldOptions_JSTypeToJSON(object: FieldOptions_JSType): string;
+export interface OneofOptions {
+    /** The parser stores options it doesn't recognize here. See above. */
+    uninterpretedOption: UninterpretedOption[];
+}
+export interface EnumOptions {
+    /**
+     * Set this option to true to allow mapping different tag names to the same
+     * value.
+     */
+    allowAlias: boolean;
+    /**
+     * Is this enum deprecated?
+     * Depending on the target platform, this can emit Deprecated annotations
+     * for the enum, or it will be completely ignored; in the very least, this
+     * is a formalization for deprecating enums.
+     */
+    deprecated: boolean;
+    /** The parser stores options it doesn't recognize here. See above. */
+    uninterpretedOption: UninterpretedOption[];
+}
+export interface EnumValueOptions {
+    /**
+     * Is this enum value deprecated?
+     * Depending on the target platform, this can emit Deprecated annotations
+     * for the enum value, or it will be completely ignored; in the very least,
+     * this is a formalization for deprecating enum values.
+     */
+    deprecated: boolean;
+    /** The parser stores options it doesn't recognize here. See above. */
+    uninterpretedOption: UninterpretedOption[];
+}
+export interface ServiceOptions {
+    /**
+     * Is this service deprecated?
+     * Depending on the target platform, this can emit Deprecated annotations
+     * for the service, or it will be completely ignored; in the very least,
+     * this is a formalization for deprecating services.
+     */
+    deprecated: boolean;
+    /** The parser stores options it doesn't recognize here. See above. */
+    uninterpretedOption: UninterpretedOption[];
+}
+export interface MethodOptions {
+    /**
+     * Is this method deprecated?
+     * Depending on the target platform, this can emit Deprecated annotations
+     * for the method, or it will be completely ignored; in the very least,
+     * this is a formalization for deprecating methods.
+     */
+    deprecated: boolean;
+    /** Declared idempotency/safety level; see MethodOptions_IdempotencyLevel. */
+    idempotencyLevel: MethodOptions_IdempotencyLevel;
+    /** The parser stores options it doesn't recognize here. See above. */
+    uninterpretedOption: UninterpretedOption[];
+}
+/**
+ * Is this method side-effect-free (or safe in HTTP parlance), or idempotent,
+ * or neither? HTTP based RPC implementation may choose GET verb for safe
+ * methods, and PUT verb for idempotent methods instead of the default POST.
+ */
+export declare enum MethodOptions_IdempotencyLevel {
+    IDEMPOTENCY_UNKNOWN = 0,
+    /** NO_SIDE_EFFECTS - implies idempotent */
+    NO_SIDE_EFFECTS = 1,
+    /** IDEMPOTENT - idempotent, but may have side effects */
+    IDEMPOTENT = 2
+}
+/** Parses the JSON form (enum name string or numeric value) into a MethodOptions_IdempotencyLevel. */
+export declare function methodOptions_IdempotencyLevelFromJSON(object: any): MethodOptions_IdempotencyLevel;
+/** Converts a MethodOptions_IdempotencyLevel to its JSON string representation. */
+export declare function methodOptions_IdempotencyLevelToJSON(object: MethodOptions_IdempotencyLevel): string;
+/**
+ * A message representing a option the parser does not recognize. This only
+ * appears in options protos created by the compiler::Parser class.
+ * DescriptorPool resolves these when building Descriptor objects. Therefore,
+ * options protos in descriptor objects (e.g. returned by Descriptor::options(),
+ * or produced by Descriptor::CopyTo()) will never have UninterpretedOptions
+ * in them.
+ */
+export interface UninterpretedOption {
+    name: UninterpretedOption_NamePart[];
+    /**
+     * The value of the uninterpreted option, in whatever type the tokenizer
+     * identified it as during parsing. Exactly one of these should be set.
+     */
+    identifierValue: string;
+    /** uint64 in descriptor.proto, carried here as a string. */
+    positiveIntValue: string;
+    /** int64 in descriptor.proto, carried here as a string. */
+    negativeIntValue: string;
+    doubleValue: number;
+    /** bytes in descriptor.proto, carried here as a Buffer. */
+    stringValue: Buffer;
+    aggregateValue: string;
+}
+/**
+ * The name of the uninterpreted option.  Each string represents a segment in
+ * a dot-separated name.  is_extension is true iff a segment represents an
+ * extension (denoted with parentheses in options specs in .proto files).
+ * E.g.,{ ["foo", false], ["bar.baz", true], ["moo", false] } represents
+ * "foo.(bar.baz).moo".
+ */
+export interface UninterpretedOption_NamePart {
+    namePart: string;
+    isExtension: boolean;
+}
+/**
+ * Encapsulates information about the original source file from which a
+ * FileDescriptorProto was generated.
+ */
+export interface SourceCodeInfo {
+    /**
+     * A Location identifies a piece of source code in a .proto file which
+     * corresponds to a particular definition.  This information is intended
+     * to be useful to IDEs, code indexers, documentation generators, and similar
+     * tools.
+     *
+     * For example, say we have a file like:
+     *   message Foo {
+     *     optional string foo = 1;
+     *   }
+     * Let's look at just the field definition:
+     *   optional string foo = 1;
+     *   ^       ^^     ^^  ^  ^^^
+     *   a       bc     de  f  ghi
+     * We have the following locations:
+     *   span   path               represents
+     *   [a,i)  [ 4, 0, 2, 0 ]     The whole field definition.
+     *   [a,b)  [ 4, 0, 2, 0, 4 ]  The label (optional).
+     *   [c,d)  [ 4, 0, 2, 0, 5 ]  The type (string).
+     *   [e,f)  [ 4, 0, 2, 0, 1 ]  The name (foo).
+     *   [g,h)  [ 4, 0, 2, 0, 3 ]  The number (1).
+     *
+     * Notes:
+     * - A location may refer to a repeated field itself (i.e. not to any
+     *   particular index within it).  This is used whenever a set of elements are
+     *   logically enclosed in a single code segment.  For example, an entire
+     *   extend block (possibly containing multiple extension definitions) will
+     *   have an outer location whose path refers to the "extensions" repeated
+     *   field without an index.
+     * - Multiple locations may have the same path.  This happens when a single
+     *   logical declaration is spread out across multiple places.  The most
+     *   obvious example is the "extend" block again -- there may be multiple
+     *   extend blocks in the same scope, each of which will have the same path.
+     * - A location's span is not always a subset of its parent's span.  For
+     *   example, the "extendee" of an extension declaration appears at the
+     *   beginning of the "extend" block and is shared by all extensions within
+     *   the block.
+     * - Just because a location's span is a subset of some other location's span
+     *   does not mean that it is a descendant.  For example, a "group" defines
+     *   both a type and a field in a single declaration.  Thus, the locations
+     *   corresponding to the type and field and their components will overlap.
+     * - Code which tries to interpret locations should probably be designed to
+     *   ignore those that it doesn't understand, as more types of locations could
+     *   be recorded in the future.
+     */
+    location: SourceCodeInfo_Location[];
+}
+export interface SourceCodeInfo_Location {
+    /**
+     * Identifies which part of the FileDescriptorProto was defined at this
+     * location.
+     *
+     * Each element is a field number or an index.  They form a path from
+     * the root FileDescriptorProto to the place where the definition occurs.
+     * For example, this path:
+     *   [ 4, 3, 2, 7, 1 ]
+     * refers to:
+     *   file.message_type(3)  // 4, 3
+     *       .field(7)         // 2, 7
+     *       .name()           // 1
+     * This is because FileDescriptorProto.message_type has field number 4:
+     *   repeated DescriptorProto message_type = 4;
+     * and DescriptorProto.field has field number 2:
+     *   repeated FieldDescriptorProto field = 2;
+     * and FieldDescriptorProto.name has field number 1:
+     *   optional string name = 1;
+     *
+     * Thus, the above path gives the location of a field name.  If we removed
+     * the last element:
+     *   [ 4, 3, 2, 7 ]
+     * this path refers to the whole field declaration (from the beginning
+     * of the label to the terminating semicolon).
+     */
+    path: number[];
+    /**
+     * Always has exactly three or four elements: start line, start column,
+     * end line (optional, otherwise assumed same as start line), end column.
+     * These are packed into a single field for efficiency.  Note that line
+     * and column numbers are zero-based -- typically you will want to add
+     * 1 to each before displaying to a user.
+     */
+    span: number[];
+    /**
+     * If this SourceCodeInfo represents a complete declaration, these are any
+     * comments appearing before and after the declaration which appear to be
+     * attached to the declaration.
+     *
+     * A series of line comments appearing on consecutive lines, with no other
+     * tokens appearing on those lines, will be treated as a single comment.
+     *
+     * leading_detached_comments will keep paragraphs of comments that appear
+     * before (but not connected to) the current element. Each paragraph,
+     * separated by empty lines, will be one comment element in the repeated
+     * field.
+     *
+     * Only the comment content is provided; comment markers (e.g. //) are
+     * stripped out.  For block comments, leading whitespace and an asterisk
+     * will be stripped from the beginning of each line other than the first.
+     * Newlines are included in the output.
+     *
+     * Examples:
+     *
+     *   optional int32 foo = 1;  // Comment attached to foo.
+     *   // Comment attached to bar.
+     *   optional int32 bar = 2;
+     *
+     *   optional string baz = 3;
+     *   // Comment attached to baz.
+     *   // Another line attached to baz.
+     *
+     *   // Comment attached to moo.
+     *   //
+     *   // Another line attached to moo.
+     *   optional double moo = 4;
+     *
+     *   // Detached comment for corge. This is not leading or trailing comments
+     *   // to moo or corge because there are blank lines separating it from
+     *   // both.
+     *
+     *   // Detached comment for corge paragraph 2.
+     *
+     *   optional string corge = 5;
+     *   /* Block comment attached
+     *    * to corge.  Leading asterisks
+     *    * will be removed. * /
+     *   /* Block comment attached to
+     *    * grault. * /
+     *   optional int32 grault = 6;
+     *
+     *   // ignored detached comments.
+     */
+    leadingComments: string;
+    trailingComments: string;
+    leadingDetachedComments: string[];
+}
+/**
+ * Describes the relationship between generated code and its original source
+ * file. A GeneratedCodeInfo message is associated with only one generated
+ * source file, but may contain references to different source .proto files.
+ */
+export interface GeneratedCodeInfo {
+    /**
+     * An Annotation connects some span of text in generated code to an element
+     * of its generating .proto file.
+     */
+    annotation: GeneratedCodeInfo_Annotation[];
+}
+export interface GeneratedCodeInfo_Annotation {
+    /**
+     * Identifies the element in the original source .proto file. This field
+     * is formatted the same as SourceCodeInfo.Location.path.
+     */
+    path: number[];
+    /** Identifies the filesystem path to the original source .proto. */
+    sourceFile: string;
+    /**
+     * Identifies the starting offset in bytes in the generated code
+     * that relates to the identified object.
+     */
+    begin: number;
+    /**
+     * Identifies the ending offset in bytes in the generated code that
+     * relates to the identified offset. The end offset should be one past
+     * the last relevant byte (so the length of the text = end - begin).
+     */
+    end: number;
+}
+/**
+ * JSON codec objects for each message type declared above. For every message
+ * `X`, `X.fromJSON` builds the typed object from a plain JSON value and
+ * `X.toJSON` converts the typed object back to a JSON-ready plain value.
+ * (Implementations live in the companion generated descriptor.js.)
+ */
+export declare const FileDescriptorSet: {
+    fromJSON(object: any): FileDescriptorSet;
+    toJSON(message: FileDescriptorSet): unknown;
+};
+export declare const FileDescriptorProto: {
+    fromJSON(object: any): FileDescriptorProto;
+    toJSON(message: FileDescriptorProto): unknown;
+};
+export declare const DescriptorProto: {
+    fromJSON(object: any): DescriptorProto;
+    toJSON(message: DescriptorProto): unknown;
+};
+export declare const DescriptorProto_ExtensionRange: {
+    fromJSON(object: any): DescriptorProto_ExtensionRange;
+    toJSON(message: DescriptorProto_ExtensionRange): unknown;
+};
+export declare const DescriptorProto_ReservedRange: {
+    fromJSON(object: any): DescriptorProto_ReservedRange;
+    toJSON(message: DescriptorProto_ReservedRange): unknown;
+};
+export declare const ExtensionRangeOptions: {
+    fromJSON(object: any): ExtensionRangeOptions;
+    toJSON(message: ExtensionRangeOptions): unknown;
+};
+export declare const FieldDescriptorProto: {
+    fromJSON(object: any): FieldDescriptorProto;
+    toJSON(message: FieldDescriptorProto): unknown;
+};
+export declare const OneofDescriptorProto: {
+    fromJSON(object: any): OneofDescriptorProto;
+    toJSON(message: OneofDescriptorProto): unknown;
+};
+export declare const EnumDescriptorProto: {
+    fromJSON(object: any): EnumDescriptorProto;
+    toJSON(message: EnumDescriptorProto): unknown;
+};
+export declare const EnumDescriptorProto_EnumReservedRange: {
+    fromJSON(object: any): EnumDescriptorProto_EnumReservedRange;
+    toJSON(message: EnumDescriptorProto_EnumReservedRange): unknown;
+};
+export declare const EnumValueDescriptorProto: {
+    fromJSON(object: any): EnumValueDescriptorProto;
+    toJSON(message: EnumValueDescriptorProto): unknown;
+};
+export declare const ServiceDescriptorProto: {
+    fromJSON(object: any): ServiceDescriptorProto;
+    toJSON(message: ServiceDescriptorProto): unknown;
+};
+export declare const MethodDescriptorProto: {
+    fromJSON(object: any): MethodDescriptorProto;
+    toJSON(message: MethodDescriptorProto): unknown;
+};
+export declare const FileOptions: {
+    fromJSON(object: any): FileOptions;
+    toJSON(message: FileOptions): unknown;
+};
+export declare const MessageOptions: {
+    fromJSON(object: any): MessageOptions;
+    toJSON(message: MessageOptions): unknown;
+};
+export declare const FieldOptions: {
+    fromJSON(object: any): FieldOptions;
+    toJSON(message: FieldOptions): unknown;
+};
+export declare const OneofOptions: {
+    fromJSON(object: any): OneofOptions;
+    toJSON(message: OneofOptions): unknown;
+};
+export declare const EnumOptions: {
+    fromJSON(object: any): EnumOptions;
+    toJSON(message: EnumOptions): unknown;
+};
+export declare const EnumValueOptions: {
+    fromJSON(object: any): EnumValueOptions;
+    toJSON(message: EnumValueOptions): unknown;
+};
+export declare const ServiceOptions: {
+    fromJSON(object: any): ServiceOptions;
+    toJSON(message: ServiceOptions): unknown;
+};
+export declare const MethodOptions: {
+    fromJSON(object: any): MethodOptions;
+    toJSON(message: MethodOptions): unknown;
+};
+export declare const UninterpretedOption: {
+    fromJSON(object: any): UninterpretedOption;
+    toJSON(message: UninterpretedOption): unknown;
+};
+export declare const UninterpretedOption_NamePart: {
+    fromJSON(object: any): UninterpretedOption_NamePart;
+    toJSON(message: UninterpretedOption_NamePart): unknown;
+};
+export declare const SourceCodeInfo: {
+    fromJSON(object: any): SourceCodeInfo;
+    toJSON(message: SourceCodeInfo): unknown;
+};
+export declare const SourceCodeInfo_Location: {
+    fromJSON(object: any): SourceCodeInfo_Location;
+    toJSON(message: SourceCodeInfo_Location): unknown;
+};
+export declare const GeneratedCodeInfo: {
+    fromJSON(object: any): GeneratedCodeInfo;
+    toJSON(message: GeneratedCodeInfo): unknown;
+};
+export declare const GeneratedCodeInfo_Annotation: {
+    fromJSON(object: any): GeneratedCodeInfo_Annotation;
+    toJSON(message: GeneratedCodeInfo_Annotation): unknown;
+};
diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
new file mode 100644
index 0000000000..b8cfc86ab9
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
@@ -0,0 +1,1308 @@
"use strict";
/* eslint-disable */
// NOTE(review): file lives under __generated__ and follows the ts-proto
// CommonJS emit style — presumably machine-generated; do not edit by hand.
Object.defineProperty(exports, "__esModule", { value: true });
// Pre-declare every export as undefined up front (standard TS/CommonJS emit),
// so all export keys exist before the definitions below assign them.
exports.GeneratedCodeInfo_Annotation = exports.GeneratedCodeInfo = exports.SourceCodeInfo_Location = exports.SourceCodeInfo = exports.UninterpretedOption_NamePart = exports.UninterpretedOption = exports.MethodOptions = exports.ServiceOptions = exports.EnumValueOptions = exports.EnumOptions = exports.OneofOptions = exports.FieldOptions = exports.MessageOptions = exports.FileOptions = exports.MethodDescriptorProto = exports.ServiceDescriptorProto = exports.EnumValueDescriptorProto = exports.EnumDescriptorProto_EnumReservedRange = exports.EnumDescriptorProto = exports.OneofDescriptorProto = exports.FieldDescriptorProto = exports.ExtensionRangeOptions = exports.DescriptorProto_ReservedRange = exports.DescriptorProto_ExtensionRange = exports.DescriptorProto = exports.FileDescriptorProto = exports.FileDescriptorSet = exports.methodOptions_IdempotencyLevelToJSON = exports.methodOptions_IdempotencyLevelFromJSON = exports.MethodOptions_IdempotencyLevel = exports.fieldOptions_JSTypeToJSON = exports.fieldOptions_JSTypeFromJSON = exports.FieldOptions_JSType = exports.fieldOptions_CTypeToJSON = exports.fieldOptions_CTypeFromJSON = exports.FieldOptions_CType = exports.fileOptions_OptimizeModeToJSON = exports.fileOptions_OptimizeModeFromJSON = exports.FileOptions_OptimizeMode = exports.fieldDescriptorProto_LabelToJSON = exports.fieldDescriptorProto_LabelFromJSON = exports.FieldDescriptorProto_Label = exports.fieldDescriptorProto_TypeToJSON = exports.fieldDescriptorProto_TypeFromJSON = exports.FieldDescriptorProto_Type = void 0;
/**
 * Field wire-type codes from descriptor.proto. 0 is reserved for errors;
 * the numbering is historical.
 */
var FieldDescriptorProto_Type;
(function (E) {
    // [name, value] pairs; the loop installs both the forward (name -> value)
    // and reverse (value -> name) mappings, exactly as the TS enum emit does.
    const members = [
        ["TYPE_DOUBLE", 1],
        ["TYPE_FLOAT", 2],
        // Not ZigZag encoded. Negative numbers take 10 bytes; use TYPE_SINT64
        // if negative values are likely.
        ["TYPE_INT64", 3],
        ["TYPE_UINT64", 4],
        // Not ZigZag encoded. Negative numbers take 10 bytes; use TYPE_SINT32
        // if negative values are likely.
        ["TYPE_INT32", 5],
        ["TYPE_FIXED64", 6],
        ["TYPE_FIXED32", 7],
        ["TYPE_BOOL", 8],
        ["TYPE_STRING", 9],
        // Tag-delimited aggregate. Deprecated in proto3, but implementations
        // must still parse the group wire format and treat it as unknown fields.
        ["TYPE_GROUP", 10],
        // Length-delimited aggregate.
        ["TYPE_MESSAGE", 11],
        // New in version 2.
        ["TYPE_BYTES", 12],
        ["TYPE_UINT32", 13],
        ["TYPE_ENUM", 14],
        ["TYPE_SFIXED32", 15],
        ["TYPE_SFIXED64", 16],
        // Uses ZigZag encoding.
        ["TYPE_SINT32", 17],
        // Uses ZigZag encoding.
        ["TYPE_SINT64", 18],
    ];
    for (const [name, value] of members) {
        E[E[name] = value] = name;
    }
})(FieldDescriptorProto_Type = exports.FieldDescriptorProto_Type || (exports.FieldDescriptorProto_Type = {}));
/**
 * Parses the JSON form of a FieldDescriptorProto_Type: either the numeric
 * value (e.g. 1) or the canonical name (e.g. "TYPE_DOUBLE").
 * @throws {Error} when the input matches no member.
 */
function fieldDescriptorProto_TypeFromJSON(object) {
    // Strict (===) comparison mirrors the original switch: the string "1"
    // is rejected even though the number 1 is accepted.
    for (const [name, value] of Object.entries(FieldDescriptorProto_Type)) {
        if (typeof value === "number" && (object === value || object === name)) {
            return value;
        }
    }
    throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type");
}
exports.fieldDescriptorProto_TypeFromJSON = fieldDescriptorProto_TypeFromJSON;
/**
 * Converts a numeric FieldDescriptorProto_Type to its canonical JSON name.
 * @throws {Error} for anything that is not a known numeric member.
 */
function fieldDescriptorProto_TypeToJSON(object) {
    // Only numbers may match: the enum's reverse mapping also has string
    // keys, which must keep throwing here, as in the original switch.
    const name = typeof object === "number" ? FieldDescriptorProto_Type[object] : undefined;
    if (typeof name === "string") {
        return name;
    }
    throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type");
}
exports.fieldDescriptorProto_TypeToJSON = fieldDescriptorProto_TypeToJSON;
/** Field cardinality labels. 0 is reserved for errors. */
var FieldDescriptorProto_Label;
(function (E) {
    const members = [
        ["LABEL_OPTIONAL", 1],
        ["LABEL_REQUIRED", 2],
        ["LABEL_REPEATED", 3],
    ];
    // Install both forward (name -> value) and reverse (value -> name) maps.
    for (const [name, value] of members) {
        E[E[name] = value] = name;
    }
})(FieldDescriptorProto_Label = exports.FieldDescriptorProto_Label || (exports.FieldDescriptorProto_Label = {}));
/**
 * Parses the JSON form of a label (numeric value or canonical name).
 * @throws {Error} for any unrecognized input.
 */
function fieldDescriptorProto_LabelFromJSON(object) {
    // Strict (===) matching mirrors the generated switch semantics.
    for (const [name, value] of Object.entries(FieldDescriptorProto_Label)) {
        if (typeof value === "number" && (object === value || object === name)) {
            return value;
        }
    }
    throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label");
}
exports.fieldDescriptorProto_LabelFromJSON = fieldDescriptorProto_LabelFromJSON;
/**
 * Converts a numeric label to its canonical JSON name.
 * @throws {Error} for values outside the enum.
 */
function fieldDescriptorProto_LabelToJSON(object) {
    const name = typeof object === "number" ? FieldDescriptorProto_Label[object] : undefined;
    if (typeof name === "string") {
        return name;
    }
    throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label");
}
exports.fieldDescriptorProto_LabelToJSON = fieldDescriptorProto_LabelToJSON;
/** Generated classes can be optimized for speed or code size. */
var FileOptions_OptimizeMode;
(function (E) {
    const members = [
        // Generate complete code for parsing, serialization, etc.
        ["SPEED", 1],
        // Use ReflectionOps to implement these methods.
        ["CODE_SIZE", 2],
        // Generate code using MessageLite and the lite runtime.
        ["LITE_RUNTIME", 3],
    ];
    for (const [name, value] of members) {
        E[E[name] = value] = name;
    }
})(FileOptions_OptimizeMode = exports.FileOptions_OptimizeMode || (exports.FileOptions_OptimizeMode = {}));
/**
 * Parses the JSON form of an optimize mode (numeric value or name).
 * @throws {Error} for any unrecognized input.
 */
function fileOptions_OptimizeModeFromJSON(object) {
    for (const [name, value] of Object.entries(FileOptions_OptimizeMode)) {
        if (typeof value === "number" && (object === value || object === name)) {
            return value;
        }
    }
    throw new globalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode");
}
exports.fileOptions_OptimizeModeFromJSON = fileOptions_OptimizeModeFromJSON;
/**
 * Converts a numeric optimize mode to its canonical JSON name.
 * @throws {Error} for values outside the enum.
 */
function fileOptions_OptimizeModeToJSON(object) {
    const name = typeof object === "number" ? FileOptions_OptimizeMode[object] : undefined;
    if (typeof name === "string") {
        return name;
    }
    throw new globalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode");
}
exports.fileOptions_OptimizeModeToJSON = fileOptions_OptimizeModeToJSON;
/** C++ string representation for string/bytes fields. */
var FieldOptions_CType;
(function (E) {
    const members = [
        // Default mode.
        ["STRING", 0],
        ["CORD", 1],
        ["STRING_PIECE", 2],
    ];
    for (const [name, value] of members) {
        E[E[name] = value] = name;
    }
})(FieldOptions_CType = exports.FieldOptions_CType || (exports.FieldOptions_CType = {}));
/**
 * Parses the JSON form of a ctype (numeric value or name).
 * @throws {Error} for any unrecognized input.
 */
function fieldOptions_CTypeFromJSON(object) {
    for (const [name, value] of Object.entries(FieldOptions_CType)) {
        if (typeof value === "number" && (object === value || object === name)) {
            return value;
        }
    }
    throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType");
}
exports.fieldOptions_CTypeFromJSON = fieldOptions_CTypeFromJSON;
/**
 * Converts a numeric ctype to its canonical JSON name.
 * @throws {Error} for values outside the enum.
 */
function fieldOptions_CTypeToJSON(object) {
    const name = typeof object === "number" ? FieldOptions_CType[object] : undefined;
    if (typeof name === "string") {
        return name;
    }
    throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType");
}
exports.fieldOptions_CTypeToJSON = fieldOptions_CTypeToJSON;
/** JavaScript representation chosen for 64-bit integer fields. */
var FieldOptions_JSType;
(function (E) {
    const members = [
        // Use the default type.
        ["JS_NORMAL", 0],
        // Use JavaScript strings.
        ["JS_STRING", 1],
        // Use JavaScript numbers.
        ["JS_NUMBER", 2],
    ];
    for (const [name, value] of members) {
        E[E[name] = value] = name;
    }
})(FieldOptions_JSType = exports.FieldOptions_JSType || (exports.FieldOptions_JSType = {}));
/**
 * Parses the JSON form of a jstype (numeric value or name).
 * @throws {Error} for any unrecognized input.
 */
function fieldOptions_JSTypeFromJSON(object) {
    for (const [name, value] of Object.entries(FieldOptions_JSType)) {
        if (typeof value === "number" && (object === value || object === name)) {
            return value;
        }
    }
    throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType");
}
exports.fieldOptions_JSTypeFromJSON = fieldOptions_JSTypeFromJSON;
/**
 * Converts a numeric jstype to its canonical JSON name.
 * @throws {Error} for values outside the enum.
 */
function fieldOptions_JSTypeToJSON(object) {
    const name = typeof object === "number" ? FieldOptions_JSType[object] : undefined;
    if (typeof name === "string") {
        return name;
    }
    throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType");
}
exports.fieldOptions_JSTypeToJSON = fieldOptions_JSTypeToJSON;
/**
 * Is this method side-effect-free (or safe in HTTP parlance), or idempotent,
 * or neither? HTTP based RPC implementation may choose GET verb for safe
 * methods, and PUT verb for idempotent methods instead of the default POST.
 */
var MethodOptions_IdempotencyLevel;
(function (E) {
    const members = [
        ["IDEMPOTENCY_UNKNOWN", 0],
        // Implies idempotent.
        ["NO_SIDE_EFFECTS", 1],
        // Idempotent, but may have side effects.
        ["IDEMPOTENT", 2],
    ];
    for (const [name, value] of members) {
        E[E[name] = value] = name;
    }
})(MethodOptions_IdempotencyLevel = exports.MethodOptions_IdempotencyLevel || (exports.MethodOptions_IdempotencyLevel = {}));
/**
 * Parses the JSON form of an idempotency level (numeric value or name).
 * @throws {Error} for any unrecognized input.
 */
function methodOptions_IdempotencyLevelFromJSON(object) {
    for (const [name, value] of Object.entries(MethodOptions_IdempotencyLevel)) {
        if (typeof value === "number" && (object === value || object === name)) {
            return value;
        }
    }
    throw new globalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel");
}
exports.methodOptions_IdempotencyLevelFromJSON = methodOptions_IdempotencyLevelFromJSON;
/**
 * Converts a numeric idempotency level to its canonical JSON name.
 * @throws {Error} for values outside the enum.
 */
function methodOptions_IdempotencyLevelToJSON(object) {
    const name = typeof object === "number" ? MethodOptions_IdempotencyLevel[object] : undefined;
    if (typeof name === "string") {
        return name;
    }
    throw new globalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel");
}
exports.methodOptions_IdempotencyLevelToJSON = methodOptions_IdempotencyLevelToJSON;
+function createBaseFileDescriptorSet() {
+ return { file: [] };
+}
+exports.FileDescriptorSet = {
+ fromJSON(object) {
+ return { file: Array.isArray(object?.file) ? object.file.map((e) => exports.FileDescriptorProto.fromJSON(e)) : [] };
+ },
+ toJSON(message) {
+ const obj = {};
+ if (message.file) {
+ obj.file = message.file.map((e) => e ? exports.FileDescriptorProto.toJSON(e) : undefined);
+ }
+ else {
+ obj.file = [];
+ }
+ return obj;
+ },
+};
+function createBaseFileDescriptorProto() {
+ return {
+ name: "",
+ package: "",
+ dependency: [],
+ publicDependency: [],
+ weakDependency: [],
+ messageType: [],
+ enumType: [],
+ service: [],
+ extension: [],
+ options: undefined,
+ sourceCodeInfo: undefined,
+ syntax: "",
+ };
+}
+exports.FileDescriptorProto = {
+ fromJSON(object) {
+ return {
+ name: isSet(object.name) ? String(object.name) : "",
+ package: isSet(object.package) ? String(object.package) : "",
+ dependency: Array.isArray(object?.dependency) ? object.dependency.map((e) => String(e)) : [],
+ publicDependency: Array.isArray(object?.publicDependency)
+ ? object.publicDependency.map((e) => Number(e))
+ : [],
+ weakDependency: Array.isArray(object?.weakDependency) ? object.weakDependency.map((e) => Number(e)) : [],
+ messageType: Array.isArray(object?.messageType)
+ ? object.messageType.map((e) => exports.DescriptorProto.fromJSON(e))
+ : [],
+ enumType: Array.isArray(object?.enumType) ? object.enumType.map((e) => exports.EnumDescriptorProto.fromJSON(e)) : [],
+ service: Array.isArray(object?.service) ? object.service.map((e) => exports.ServiceDescriptorProto.fromJSON(e)) : [],
+ extension: Array.isArray(object?.extension)
+ ? object.extension.map((e) => exports.FieldDescriptorProto.fromJSON(e))
+ : [],
+ options: isSet(object.options) ? exports.FileOptions.fromJSON(object.options) : undefined,
+ sourceCodeInfo: isSet(object.sourceCodeInfo) ? exports.SourceCodeInfo.fromJSON(object.sourceCodeInfo) : undefined,
+ syntax: isSet(object.syntax) ? String(object.syntax) : "",
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.name !== undefined && (obj.name = message.name);
+ message.package !== undefined && (obj.package = message.package);
+ if (message.dependency) {
+ obj.dependency = message.dependency.map((e) => e);
+ }
+ else {
+ obj.dependency = [];
+ }
+ if (message.publicDependency) {
+ obj.publicDependency = message.publicDependency.map((e) => Math.round(e));
+ }
+ else {
+ obj.publicDependency = [];
+ }
+ if (message.weakDependency) {
+ obj.weakDependency = message.weakDependency.map((e) => Math.round(e));
+ }
+ else {
+ obj.weakDependency = [];
+ }
+ if (message.messageType) {
+ obj.messageType = message.messageType.map((e) => e ? exports.DescriptorProto.toJSON(e) : undefined);
+ }
+ else {
+ obj.messageType = [];
+ }
+ if (message.enumType) {
+ obj.enumType = message.enumType.map((e) => e ? exports.EnumDescriptorProto.toJSON(e) : undefined);
+ }
+ else {
+ obj.enumType = [];
+ }
+ if (message.service) {
+ obj.service = message.service.map((e) => e ? exports.ServiceDescriptorProto.toJSON(e) : undefined);
+ }
+ else {
+ obj.service = [];
+ }
+ if (message.extension) {
+ obj.extension = message.extension.map((e) => e ? exports.FieldDescriptorProto.toJSON(e) : undefined);
+ }
+ else {
+ obj.extension = [];
+ }
+ message.options !== undefined && (obj.options = message.options ? exports.FileOptions.toJSON(message.options) : undefined);
+ message.sourceCodeInfo !== undefined &&
+ (obj.sourceCodeInfo = message.sourceCodeInfo ? exports.SourceCodeInfo.toJSON(message.sourceCodeInfo) : undefined);
+ message.syntax !== undefined && (obj.syntax = message.syntax);
+ return obj;
+ },
+};
+function createBaseDescriptorProto() {
+ return {
+ name: "",
+ field: [],
+ extension: [],
+ nestedType: [],
+ enumType: [],
+ extensionRange: [],
+ oneofDecl: [],
+ options: undefined,
+ reservedRange: [],
+ reservedName: [],
+ };
+}
+exports.DescriptorProto = {
+ fromJSON(object) {
+ return {
+ name: isSet(object.name) ? String(object.name) : "",
+ field: Array.isArray(object?.field) ? object.field.map((e) => exports.FieldDescriptorProto.fromJSON(e)) : [],
+ extension: Array.isArray(object?.extension)
+ ? object.extension.map((e) => exports.FieldDescriptorProto.fromJSON(e))
+ : [],
+ nestedType: Array.isArray(object?.nestedType)
+ ? object.nestedType.map((e) => exports.DescriptorProto.fromJSON(e))
+ : [],
+ enumType: Array.isArray(object?.enumType) ? object.enumType.map((e) => exports.EnumDescriptorProto.fromJSON(e)) : [],
+ extensionRange: Array.isArray(object?.extensionRange)
+ ? object.extensionRange.map((e) => exports.DescriptorProto_ExtensionRange.fromJSON(e))
+ : [],
+ oneofDecl: Array.isArray(object?.oneofDecl)
+ ? object.oneofDecl.map((e) => exports.OneofDescriptorProto.fromJSON(e))
+ : [],
+ options: isSet(object.options) ? exports.MessageOptions.fromJSON(object.options) : undefined,
+ reservedRange: Array.isArray(object?.reservedRange)
+ ? object.reservedRange.map((e) => exports.DescriptorProto_ReservedRange.fromJSON(e))
+ : [],
+ reservedName: Array.isArray(object?.reservedName) ? object.reservedName.map((e) => String(e)) : [],
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.name !== undefined && (obj.name = message.name);
+ if (message.field) {
+ obj.field = message.field.map((e) => e ? exports.FieldDescriptorProto.toJSON(e) : undefined);
+ }
+ else {
+ obj.field = [];
+ }
+ if (message.extension) {
+ obj.extension = message.extension.map((e) => e ? exports.FieldDescriptorProto.toJSON(e) : undefined);
+ }
+ else {
+ obj.extension = [];
+ }
+ if (message.nestedType) {
+ obj.nestedType = message.nestedType.map((e) => e ? exports.DescriptorProto.toJSON(e) : undefined);
+ }
+ else {
+ obj.nestedType = [];
+ }
+ if (message.enumType) {
+ obj.enumType = message.enumType.map((e) => e ? exports.EnumDescriptorProto.toJSON(e) : undefined);
+ }
+ else {
+ obj.enumType = [];
+ }
+ if (message.extensionRange) {
+ obj.extensionRange = message.extensionRange.map((e) => e ? exports.DescriptorProto_ExtensionRange.toJSON(e) : undefined);
+ }
+ else {
+ obj.extensionRange = [];
+ }
+ if (message.oneofDecl) {
+ obj.oneofDecl = message.oneofDecl.map((e) => e ? exports.OneofDescriptorProto.toJSON(e) : undefined);
+ }
+ else {
+ obj.oneofDecl = [];
+ }
+ message.options !== undefined &&
+ (obj.options = message.options ? exports.MessageOptions.toJSON(message.options) : undefined);
+ if (message.reservedRange) {
+ obj.reservedRange = message.reservedRange.map((e) => e ? exports.DescriptorProto_ReservedRange.toJSON(e) : undefined);
+ }
+ else {
+ obj.reservedRange = [];
+ }
+ if (message.reservedName) {
+ obj.reservedName = message.reservedName.map((e) => e);
+ }
+ else {
+ obj.reservedName = [];
+ }
+ return obj;
+ },
+};
+function createBaseDescriptorProto_ExtensionRange() {
+ return { start: 0, end: 0, options: undefined };
+}
+exports.DescriptorProto_ExtensionRange = {
+ fromJSON(object) {
+ return {
+ start: isSet(object.start) ? Number(object.start) : 0,
+ end: isSet(object.end) ? Number(object.end) : 0,
+ options: isSet(object.options) ? exports.ExtensionRangeOptions.fromJSON(object.options) : undefined,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.start !== undefined && (obj.start = Math.round(message.start));
+ message.end !== undefined && (obj.end = Math.round(message.end));
+ message.options !== undefined &&
+ (obj.options = message.options ? exports.ExtensionRangeOptions.toJSON(message.options) : undefined);
+ return obj;
+ },
+};
+function createBaseDescriptorProto_ReservedRange() {
+ return { start: 0, end: 0 };
+}
+exports.DescriptorProto_ReservedRange = {
+ fromJSON(object) {
+ return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? Number(object.end) : 0 };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.start !== undefined && (obj.start = Math.round(message.start));
+ message.end !== undefined && (obj.end = Math.round(message.end));
+ return obj;
+ },
+};
+function createBaseExtensionRangeOptions() {
+ return { uninterpretedOption: [] };
+}
+exports.ExtensionRangeOptions = {
+ fromJSON(object) {
+ return {
+ uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+ ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+ : [],
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ if (message.uninterpretedOption) {
+ obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+ }
+ else {
+ obj.uninterpretedOption = [];
+ }
+ return obj;
+ },
+};
+function createBaseFieldDescriptorProto() {
+ return {
+ name: "",
+ number: 0,
+ label: 1,
+ type: 1,
+ typeName: "",
+ extendee: "",
+ defaultValue: "",
+ oneofIndex: 0,
+ jsonName: "",
+ options: undefined,
+ proto3Optional: false,
+ };
+}
+exports.FieldDescriptorProto = {
+ fromJSON(object) {
+ return {
+ name: isSet(object.name) ? String(object.name) : "",
+ number: isSet(object.number) ? Number(object.number) : 0,
+ label: isSet(object.label) ? fieldDescriptorProto_LabelFromJSON(object.label) : 1,
+ type: isSet(object.type) ? fieldDescriptorProto_TypeFromJSON(object.type) : 1,
+ typeName: isSet(object.typeName) ? String(object.typeName) : "",
+ extendee: isSet(object.extendee) ? String(object.extendee) : "",
+ defaultValue: isSet(object.defaultValue) ? String(object.defaultValue) : "",
+ oneofIndex: isSet(object.oneofIndex) ? Number(object.oneofIndex) : 0,
+ jsonName: isSet(object.jsonName) ? String(object.jsonName) : "",
+ options: isSet(object.options) ? exports.FieldOptions.fromJSON(object.options) : undefined,
+ proto3Optional: isSet(object.proto3Optional) ? Boolean(object.proto3Optional) : false,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.name !== undefined && (obj.name = message.name);
+ message.number !== undefined && (obj.number = Math.round(message.number));
+ message.label !== undefined && (obj.label = fieldDescriptorProto_LabelToJSON(message.label));
+ message.type !== undefined && (obj.type = fieldDescriptorProto_TypeToJSON(message.type));
+ message.typeName !== undefined && (obj.typeName = message.typeName);
+ message.extendee !== undefined && (obj.extendee = message.extendee);
+ message.defaultValue !== undefined && (obj.defaultValue = message.defaultValue);
+ message.oneofIndex !== undefined && (obj.oneofIndex = Math.round(message.oneofIndex));
+ message.jsonName !== undefined && (obj.jsonName = message.jsonName);
+ message.options !== undefined && (obj.options = message.options ? exports.FieldOptions.toJSON(message.options) : undefined);
+ message.proto3Optional !== undefined && (obj.proto3Optional = message.proto3Optional);
+ return obj;
+ },
+};
+function createBaseOneofDescriptorProto() {
+ return { name: "", options: undefined };
+}
+exports.OneofDescriptorProto = {
+ fromJSON(object) {
+ return {
+ name: isSet(object.name) ? String(object.name) : "",
+ options: isSet(object.options) ? exports.OneofOptions.fromJSON(object.options) : undefined,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.name !== undefined && (obj.name = message.name);
+ message.options !== undefined && (obj.options = message.options ? exports.OneofOptions.toJSON(message.options) : undefined);
+ return obj;
+ },
+};
+function createBaseEnumDescriptorProto() {
+ return { name: "", value: [], options: undefined, reservedRange: [], reservedName: [] };
+}
+exports.EnumDescriptorProto = {
+ fromJSON(object) {
+ return {
+ name: isSet(object.name) ? String(object.name) : "",
+ value: Array.isArray(object?.value) ? object.value.map((e) => exports.EnumValueDescriptorProto.fromJSON(e)) : [],
+ options: isSet(object.options) ? exports.EnumOptions.fromJSON(object.options) : undefined,
+ reservedRange: Array.isArray(object?.reservedRange)
+ ? object.reservedRange.map((e) => exports.EnumDescriptorProto_EnumReservedRange.fromJSON(e))
+ : [],
+ reservedName: Array.isArray(object?.reservedName)
+ ? object.reservedName.map((e) => String(e))
+ : [],
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.name !== undefined && (obj.name = message.name);
+ if (message.value) {
+ obj.value = message.value.map((e) => e ? exports.EnumValueDescriptorProto.toJSON(e) : undefined);
+ }
+ else {
+ obj.value = [];
+ }
+ message.options !== undefined && (obj.options = message.options ? exports.EnumOptions.toJSON(message.options) : undefined);
+ if (message.reservedRange) {
+ obj.reservedRange = message.reservedRange.map((e) => e ? exports.EnumDescriptorProto_EnumReservedRange.toJSON(e) : undefined);
+ }
+ else {
+ obj.reservedRange = [];
+ }
+ if (message.reservedName) {
+ obj.reservedName = message.reservedName.map((e) => e);
+ }
+ else {
+ obj.reservedName = [];
+ }
+ return obj;
+ },
+};
+function createBaseEnumDescriptorProto_EnumReservedRange() {
+ return { start: 0, end: 0 };
+}
+exports.EnumDescriptorProto_EnumReservedRange = {
+ fromJSON(object) {
+ return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? Number(object.end) : 0 };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.start !== undefined && (obj.start = Math.round(message.start));
+ message.end !== undefined && (obj.end = Math.round(message.end));
+ return obj;
+ },
+};
+function createBaseEnumValueDescriptorProto() {
+ return { name: "", number: 0, options: undefined };
+}
+exports.EnumValueDescriptorProto = {
+ fromJSON(object) {
+ return {
+ name: isSet(object.name) ? String(object.name) : "",
+ number: isSet(object.number) ? Number(object.number) : 0,
+ options: isSet(object.options) ? exports.EnumValueOptions.fromJSON(object.options) : undefined,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.name !== undefined && (obj.name = message.name);
+ message.number !== undefined && (obj.number = Math.round(message.number));
+ message.options !== undefined &&
+ (obj.options = message.options ? exports.EnumValueOptions.toJSON(message.options) : undefined);
+ return obj;
+ },
+};
+function createBaseServiceDescriptorProto() {
+ return { name: "", method: [], options: undefined };
+}
+exports.ServiceDescriptorProto = {
+ fromJSON(object) {
+ return {
+ name: isSet(object.name) ? String(object.name) : "",
+ method: Array.isArray(object?.method) ? object.method.map((e) => exports.MethodDescriptorProto.fromJSON(e)) : [],
+ options: isSet(object.options) ? exports.ServiceOptions.fromJSON(object.options) : undefined,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.name !== undefined && (obj.name = message.name);
+ if (message.method) {
+ obj.method = message.method.map((e) => e ? exports.MethodDescriptorProto.toJSON(e) : undefined);
+ }
+ else {
+ obj.method = [];
+ }
+ message.options !== undefined &&
+ (obj.options = message.options ? exports.ServiceOptions.toJSON(message.options) : undefined);
+ return obj;
+ },
+};
+function createBaseMethodDescriptorProto() {
+ return {
+ name: "",
+ inputType: "",
+ outputType: "",
+ options: undefined,
+ clientStreaming: false,
+ serverStreaming: false,
+ };
+}
+exports.MethodDescriptorProto = {
+ fromJSON(object) {
+ return {
+ name: isSet(object.name) ? String(object.name) : "",
+ inputType: isSet(object.inputType) ? String(object.inputType) : "",
+ outputType: isSet(object.outputType) ? String(object.outputType) : "",
+ options: isSet(object.options) ? exports.MethodOptions.fromJSON(object.options) : undefined,
+ clientStreaming: isSet(object.clientStreaming) ? Boolean(object.clientStreaming) : false,
+ serverStreaming: isSet(object.serverStreaming) ? Boolean(object.serverStreaming) : false,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.name !== undefined && (obj.name = message.name);
+ message.inputType !== undefined && (obj.inputType = message.inputType);
+ message.outputType !== undefined && (obj.outputType = message.outputType);
+ message.options !== undefined &&
+ (obj.options = message.options ? exports.MethodOptions.toJSON(message.options) : undefined);
+ message.clientStreaming !== undefined && (obj.clientStreaming = message.clientStreaming);
+ message.serverStreaming !== undefined && (obj.serverStreaming = message.serverStreaming);
+ return obj;
+ },
+};
/**
 * Returns a FileOptions message with every field at its proto3 default.
 * optimizeFor defaults to 1 (SPEED, per descriptor.proto).
 */
function createBaseFileOptions() {
    const message = {
        javaPackage: "",
        javaOuterClassname: "",
        javaMultipleFiles: false,
        javaGenerateEqualsAndHash: false,
        javaStringCheckUtf8: false,
        optimizeFor: 1,
        goPackage: "",
        ccGenericServices: false,
        javaGenericServices: false,
        pyGenericServices: false,
        phpGenericServices: false,
        deprecated: false,
        ccEnableArenas: false,
        objcClassPrefix: "",
        csharpNamespace: "",
        swiftPrefix: "",
        phpClassPrefix: "",
        phpNamespace: "",
        phpMetadataNamespace: "",
        rubyPackage: "",
        uninterpretedOption: [],
    };
    return message;
}
+exports.FileOptions = {
+ fromJSON(object) {
+ return {
+ javaPackage: isSet(object.javaPackage) ? String(object.javaPackage) : "",
+ javaOuterClassname: isSet(object.javaOuterClassname) ? String(object.javaOuterClassname) : "",
+ javaMultipleFiles: isSet(object.javaMultipleFiles) ? Boolean(object.javaMultipleFiles) : false,
+ javaGenerateEqualsAndHash: isSet(object.javaGenerateEqualsAndHash)
+ ? Boolean(object.javaGenerateEqualsAndHash)
+ : false,
+ javaStringCheckUtf8: isSet(object.javaStringCheckUtf8) ? Boolean(object.javaStringCheckUtf8) : false,
+ optimizeFor: isSet(object.optimizeFor) ? fileOptions_OptimizeModeFromJSON(object.optimizeFor) : 1,
+ goPackage: isSet(object.goPackage) ? String(object.goPackage) : "",
+ ccGenericServices: isSet(object.ccGenericServices) ? Boolean(object.ccGenericServices) : false,
+ javaGenericServices: isSet(object.javaGenericServices) ? Boolean(object.javaGenericServices) : false,
+ pyGenericServices: isSet(object.pyGenericServices) ? Boolean(object.pyGenericServices) : false,
+ phpGenericServices: isSet(object.phpGenericServices) ? Boolean(object.phpGenericServices) : false,
+ deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
+ ccEnableArenas: isSet(object.ccEnableArenas) ? Boolean(object.ccEnableArenas) : false,
+ objcClassPrefix: isSet(object.objcClassPrefix) ? String(object.objcClassPrefix) : "",
+ csharpNamespace: isSet(object.csharpNamespace) ? String(object.csharpNamespace) : "",
+ swiftPrefix: isSet(object.swiftPrefix) ? String(object.swiftPrefix) : "",
+ phpClassPrefix: isSet(object.phpClassPrefix) ? String(object.phpClassPrefix) : "",
+ phpNamespace: isSet(object.phpNamespace) ? String(object.phpNamespace) : "",
+ phpMetadataNamespace: isSet(object.phpMetadataNamespace) ? String(object.phpMetadataNamespace) : "",
+ rubyPackage: isSet(object.rubyPackage) ? String(object.rubyPackage) : "",
+ uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+ ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+ : [],
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.javaPackage !== undefined && (obj.javaPackage = message.javaPackage);
+ message.javaOuterClassname !== undefined && (obj.javaOuterClassname = message.javaOuterClassname);
+ message.javaMultipleFiles !== undefined && (obj.javaMultipleFiles = message.javaMultipleFiles);
+ message.javaGenerateEqualsAndHash !== undefined &&
+ (obj.javaGenerateEqualsAndHash = message.javaGenerateEqualsAndHash);
+ message.javaStringCheckUtf8 !== undefined && (obj.javaStringCheckUtf8 = message.javaStringCheckUtf8);
+ message.optimizeFor !== undefined && (obj.optimizeFor = fileOptions_OptimizeModeToJSON(message.optimizeFor));
+ message.goPackage !== undefined && (obj.goPackage = message.goPackage);
+ message.ccGenericServices !== undefined && (obj.ccGenericServices = message.ccGenericServices);
+ message.javaGenericServices !== undefined && (obj.javaGenericServices = message.javaGenericServices);
+ message.pyGenericServices !== undefined && (obj.pyGenericServices = message.pyGenericServices);
+ message.phpGenericServices !== undefined && (obj.phpGenericServices = message.phpGenericServices);
+ message.deprecated !== undefined && (obj.deprecated = message.deprecated);
+ message.ccEnableArenas !== undefined && (obj.ccEnableArenas = message.ccEnableArenas);
+ message.objcClassPrefix !== undefined && (obj.objcClassPrefix = message.objcClassPrefix);
+ message.csharpNamespace !== undefined && (obj.csharpNamespace = message.csharpNamespace);
+ message.swiftPrefix !== undefined && (obj.swiftPrefix = message.swiftPrefix);
+ message.phpClassPrefix !== undefined && (obj.phpClassPrefix = message.phpClassPrefix);
+ message.phpNamespace !== undefined && (obj.phpNamespace = message.phpNamespace);
+ message.phpMetadataNamespace !== undefined && (obj.phpMetadataNamespace = message.phpMetadataNamespace);
+ message.rubyPackage !== undefined && (obj.rubyPackage = message.rubyPackage);
+ if (message.uninterpretedOption) {
+ obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+ }
+ else {
+ obj.uninterpretedOption = [];
+ }
+ return obj;
+ },
+};
+function createBaseMessageOptions() {
+ return {
+ messageSetWireFormat: false,
+ noStandardDescriptorAccessor: false,
+ deprecated: false,
+ mapEntry: false,
+ uninterpretedOption: [],
+ };
+}
+exports.MessageOptions = {
+ fromJSON(object) {
+ return {
+ messageSetWireFormat: isSet(object.messageSetWireFormat) ? Boolean(object.messageSetWireFormat) : false,
+ noStandardDescriptorAccessor: isSet(object.noStandardDescriptorAccessor)
+ ? Boolean(object.noStandardDescriptorAccessor)
+ : false,
+ deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
+ mapEntry: isSet(object.mapEntry) ? Boolean(object.mapEntry) : false,
+ uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+ ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+ : [],
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.messageSetWireFormat !== undefined && (obj.messageSetWireFormat = message.messageSetWireFormat);
+ message.noStandardDescriptorAccessor !== undefined &&
+ (obj.noStandardDescriptorAccessor = message.noStandardDescriptorAccessor);
+ message.deprecated !== undefined && (obj.deprecated = message.deprecated);
+ message.mapEntry !== undefined && (obj.mapEntry = message.mapEntry);
+ if (message.uninterpretedOption) {
+ obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+ }
+ else {
+ obj.uninterpretedOption = [];
+ }
+ return obj;
+ },
+};
+function createBaseFieldOptions() {
+ return {
+ ctype: 0,
+ packed: false,
+ jstype: 0,
+ lazy: false,
+ unverifiedLazy: false,
+ deprecated: false,
+ weak: false,
+ uninterpretedOption: [],
+ };
+}
+exports.FieldOptions = {
+ fromJSON(object) {
+ return {
+ ctype: isSet(object.ctype) ? fieldOptions_CTypeFromJSON(object.ctype) : 0,
+ packed: isSet(object.packed) ? Boolean(object.packed) : false,
+ jstype: isSet(object.jstype) ? fieldOptions_JSTypeFromJSON(object.jstype) : 0,
+ lazy: isSet(object.lazy) ? Boolean(object.lazy) : false,
+ unverifiedLazy: isSet(object.unverifiedLazy) ? Boolean(object.unverifiedLazy) : false,
+ deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
+ weak: isSet(object.weak) ? Boolean(object.weak) : false,
+ uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+ ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+ : [],
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.ctype !== undefined && (obj.ctype = fieldOptions_CTypeToJSON(message.ctype));
+ message.packed !== undefined && (obj.packed = message.packed);
+ message.jstype !== undefined && (obj.jstype = fieldOptions_JSTypeToJSON(message.jstype));
+ message.lazy !== undefined && (obj.lazy = message.lazy);
+ message.unverifiedLazy !== undefined && (obj.unverifiedLazy = message.unverifiedLazy);
+ message.deprecated !== undefined && (obj.deprecated = message.deprecated);
+ message.weak !== undefined && (obj.weak = message.weak);
+ if (message.uninterpretedOption) {
+ obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+ }
+ else {
+ obj.uninterpretedOption = [];
+ }
+ return obj;
+ },
+};
+function createBaseOneofOptions() {
+ return { uninterpretedOption: [] };
+}
+exports.OneofOptions = {
+ fromJSON(object) {
+ return {
+ uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+ ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+ : [],
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ if (message.uninterpretedOption) {
+ obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+ }
+ else {
+ obj.uninterpretedOption = [];
+ }
+ return obj;
+ },
+};
+function createBaseEnumOptions() {
+ return { allowAlias: false, deprecated: false, uninterpretedOption: [] };
+}
+exports.EnumOptions = {
+ fromJSON(object) {
+ return {
+ allowAlias: isSet(object.allowAlias) ? Boolean(object.allowAlias) : false,
+ deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
+ uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+ ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+ : [],
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.allowAlias !== undefined && (obj.allowAlias = message.allowAlias);
+ message.deprecated !== undefined && (obj.deprecated = message.deprecated);
+ if (message.uninterpretedOption) {
+ obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+ }
+ else {
+ obj.uninterpretedOption = [];
+ }
+ return obj;
+ },
+};
+function createBaseEnumValueOptions() {
+ return { deprecated: false, uninterpretedOption: [] };
+}
+exports.EnumValueOptions = {
+ fromJSON(object) {
+ return {
+ deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
+ uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+ ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+ : [],
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.deprecated !== undefined && (obj.deprecated = message.deprecated);
+ if (message.uninterpretedOption) {
+ obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+ }
+ else {
+ obj.uninterpretedOption = [];
+ }
+ return obj;
+ },
+};
+function createBaseServiceOptions() {
+ return { deprecated: false, uninterpretedOption: [] };
+}
+exports.ServiceOptions = {
+ fromJSON(object) {
+ return {
+ deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
+ uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+ ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+ : [],
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.deprecated !== undefined && (obj.deprecated = message.deprecated);
+ if (message.uninterpretedOption) {
+ obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+ }
+ else {
+ obj.uninterpretedOption = [];
+ }
+ return obj;
+ },
+};
+function createBaseMethodOptions() {
+ return { deprecated: false, idempotencyLevel: 0, uninterpretedOption: [] };
+}
+exports.MethodOptions = {
+ fromJSON(object) {
+ return {
+ deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
+ idempotencyLevel: isSet(object.idempotencyLevel)
+ ? methodOptions_IdempotencyLevelFromJSON(object.idempotencyLevel)
+ : 0,
+ uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+ ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+ : [],
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.deprecated !== undefined && (obj.deprecated = message.deprecated);
+ message.idempotencyLevel !== undefined &&
+ (obj.idempotencyLevel = methodOptions_IdempotencyLevelToJSON(message.idempotencyLevel));
+ if (message.uninterpretedOption) {
+ obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+ }
+ else {
+ obj.uninterpretedOption = [];
+ }
+ return obj;
+ },
+};
+function createBaseUninterpretedOption() {
+ return {
+ name: [],
+ identifierValue: "",
+ positiveIntValue: "0",
+ negativeIntValue: "0",
+ doubleValue: 0,
+ stringValue: Buffer.alloc(0),
+ aggregateValue: "",
+ };
+}
+exports.UninterpretedOption = {
+ fromJSON(object) {
+ return {
+ name: Array.isArray(object?.name) ? object.name.map((e) => exports.UninterpretedOption_NamePart.fromJSON(e)) : [],
+ identifierValue: isSet(object.identifierValue) ? String(object.identifierValue) : "",
+ positiveIntValue: isSet(object.positiveIntValue) ? String(object.positiveIntValue) : "0",
+ negativeIntValue: isSet(object.negativeIntValue) ? String(object.negativeIntValue) : "0",
+ doubleValue: isSet(object.doubleValue) ? Number(object.doubleValue) : 0,
+ stringValue: isSet(object.stringValue) ? Buffer.from(bytesFromBase64(object.stringValue)) : Buffer.alloc(0),
+ aggregateValue: isSet(object.aggregateValue) ? String(object.aggregateValue) : "",
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ if (message.name) {
+ obj.name = message.name.map((e) => e ? exports.UninterpretedOption_NamePart.toJSON(e) : undefined);
+ }
+ else {
+ obj.name = [];
+ }
+ message.identifierValue !== undefined && (obj.identifierValue = message.identifierValue);
+ message.positiveIntValue !== undefined && (obj.positiveIntValue = message.positiveIntValue);
+ message.negativeIntValue !== undefined && (obj.negativeIntValue = message.negativeIntValue);
+ message.doubleValue !== undefined && (obj.doubleValue = message.doubleValue);
+ message.stringValue !== undefined &&
+ (obj.stringValue = base64FromBytes(message.stringValue !== undefined ? message.stringValue : Buffer.alloc(0)));
+ message.aggregateValue !== undefined && (obj.aggregateValue = message.aggregateValue);
+ return obj;
+ },
+};
+function createBaseUninterpretedOption_NamePart() {
+ return { namePart: "", isExtension: false };
+}
+exports.UninterpretedOption_NamePart = {
+ fromJSON(object) {
+ return {
+ namePart: isSet(object.namePart) ? String(object.namePart) : "",
+ isExtension: isSet(object.isExtension) ? Boolean(object.isExtension) : false,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.namePart !== undefined && (obj.namePart = message.namePart);
+ message.isExtension !== undefined && (obj.isExtension = message.isExtension);
+ return obj;
+ },
+};
+function createBaseSourceCodeInfo() {
+ return { location: [] };
+}
+exports.SourceCodeInfo = {
+ fromJSON(object) {
+ return {
+ location: Array.isArray(object?.location)
+ ? object.location.map((e) => exports.SourceCodeInfo_Location.fromJSON(e))
+ : [],
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ if (message.location) {
+ obj.location = message.location.map((e) => e ? exports.SourceCodeInfo_Location.toJSON(e) : undefined);
+ }
+ else {
+ obj.location = [];
+ }
+ return obj;
+ },
+};
+function createBaseSourceCodeInfo_Location() {
+ return { path: [], span: [], leadingComments: "", trailingComments: "", leadingDetachedComments: [] };
+}
+exports.SourceCodeInfo_Location = {
+ fromJSON(object) {
+ return {
+ path: Array.isArray(object?.path) ? object.path.map((e) => Number(e)) : [],
+ span: Array.isArray(object?.span) ? object.span.map((e) => Number(e)) : [],
+ leadingComments: isSet(object.leadingComments) ? String(object.leadingComments) : "",
+ trailingComments: isSet(object.trailingComments) ? String(object.trailingComments) : "",
+ leadingDetachedComments: Array.isArray(object?.leadingDetachedComments)
+ ? object.leadingDetachedComments.map((e) => String(e))
+ : [],
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ if (message.path) {
+ obj.path = message.path.map((e) => Math.round(e));
+ }
+ else {
+ obj.path = [];
+ }
+ if (message.span) {
+ obj.span = message.span.map((e) => Math.round(e));
+ }
+ else {
+ obj.span = [];
+ }
+ message.leadingComments !== undefined && (obj.leadingComments = message.leadingComments);
+ message.trailingComments !== undefined && (obj.trailingComments = message.trailingComments);
+ if (message.leadingDetachedComments) {
+ obj.leadingDetachedComments = message.leadingDetachedComments.map((e) => e);
+ }
+ else {
+ obj.leadingDetachedComments = [];
+ }
+ return obj;
+ },
+};
+function createBaseGeneratedCodeInfo() {
+ return { annotation: [] };
+}
+exports.GeneratedCodeInfo = {
+ fromJSON(object) {
+ return {
+ annotation: Array.isArray(object?.annotation)
+ ? object.annotation.map((e) => exports.GeneratedCodeInfo_Annotation.fromJSON(e))
+ : [],
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ if (message.annotation) {
+ obj.annotation = message.annotation.map((e) => e ? exports.GeneratedCodeInfo_Annotation.toJSON(e) : undefined);
+ }
+ else {
+ obj.annotation = [];
+ }
+ return obj;
+ },
+};
+function createBaseGeneratedCodeInfo_Annotation() {
+ return { path: [], sourceFile: "", begin: 0, end: 0 };
+}
+exports.GeneratedCodeInfo_Annotation = {
+ fromJSON(object) {
+ return {
+ path: Array.isArray(object?.path) ? object.path.map((e) => Number(e)) : [],
+ sourceFile: isSet(object.sourceFile) ? String(object.sourceFile) : "",
+ begin: isSet(object.begin) ? Number(object.begin) : 0,
+ end: isSet(object.end) ? Number(object.end) : 0,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ if (message.path) {
+ obj.path = message.path.map((e) => Math.round(e));
+ }
+ else {
+ obj.path = [];
+ }
+ message.sourceFile !== undefined && (obj.sourceFile = message.sourceFile);
+ message.begin !== undefined && (obj.begin = Math.round(message.begin));
+ message.end !== undefined && (obj.end = Math.round(message.end));
+ return obj;
+ },
+};
+var globalThis = (() => {
+ if (typeof globalThis !== "undefined") {
+ return globalThis;
+ }
+ if (typeof self !== "undefined") {
+ return self;
+ }
+ if (typeof window !== "undefined") {
+ return window;
+ }
+ if (typeof global !== "undefined") {
+ return global;
+ }
+ throw "Unable to locate global object";
+})();
+function bytesFromBase64(b64) {
+ if (globalThis.Buffer) {
+ return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
+ }
+ else {
+ const bin = globalThis.atob(b64);
+ const arr = new Uint8Array(bin.length);
+ for (let i = 0; i < bin.length; ++i) {
+ arr[i] = bin.charCodeAt(i);
+ }
+ return arr;
+ }
+}
+function base64FromBytes(arr) {
+ if (globalThis.Buffer) {
+ return globalThis.Buffer.from(arr).toString("base64");
+ }
+ else {
+ const bin = [];
+ arr.forEach((byte) => {
+ bin.push(String.fromCharCode(byte));
+ });
+ return globalThis.btoa(bin.join(""));
+ }
+}
+function isSet(value) {
+ return value !== null && value !== undefined;
+}
diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.d.ts b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.d.ts
new file mode 100644
index 0000000000..1ab812b4a9
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.d.ts
@@ -0,0 +1,110 @@
+/**
+ * A Timestamp represents a point in time independent of any time zone or local
+ * calendar, encoded as a count of seconds and fractions of seconds at
+ * nanosecond resolution. The count is relative to an epoch at UTC midnight on
+ * January 1, 1970, in the proleptic Gregorian calendar which extends the
+ * Gregorian calendar backwards to year one.
+ *
+ * All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap
+ * second table is needed for interpretation, using a [24-hour linear
+ * smear](https://developers.google.com/time/smear).
+ *
+ * The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By
+ * restricting to that range, we ensure that we can convert to and from [RFC
+ * 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings.
+ *
+ * # Examples
+ *
+ * Example 1: Compute Timestamp from POSIX `time()`.
+ *
+ * Timestamp timestamp;
+ * timestamp.set_seconds(time(NULL));
+ * timestamp.set_nanos(0);
+ *
+ * Example 2: Compute Timestamp from POSIX `gettimeofday()`.
+ *
+ * struct timeval tv;
+ * gettimeofday(&tv, NULL);
+ *
+ * Timestamp timestamp;
+ * timestamp.set_seconds(tv.tv_sec);
+ * timestamp.set_nanos(tv.tv_usec * 1000);
+ *
+ * Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`.
+ *
+ * FILETIME ft;
+ * GetSystemTimeAsFileTime(&ft);
+ * UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime;
+ *
+ * // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z
+ * // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z.
+ * Timestamp timestamp;
+ * timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL));
+ * timestamp.set_nanos((INT32) ((ticks % 10000000) * 100));
+ *
+ * Example 4: Compute Timestamp from Java `System.currentTimeMillis()`.
+ *
+ * long millis = System.currentTimeMillis();
+ *
+ * Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000)
+ * .setNanos((int) ((millis % 1000) * 1000000)).build();
+ *
+ * Example 5: Compute Timestamp from Java `Instant.now()`.
+ *
+ * Instant now = Instant.now();
+ *
+ * Timestamp timestamp =
+ * Timestamp.newBuilder().setSeconds(now.getEpochSecond())
+ * .setNanos(now.getNano()).build();
+ *
+ * Example 6: Compute Timestamp from current time in Python.
+ *
+ * timestamp = Timestamp()
+ * timestamp.GetCurrentTime()
+ *
+ * # JSON Mapping
+ *
+ * In JSON format, the Timestamp type is encoded as a string in the
+ * [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the
+ * format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z"
+ * where {year} is always expressed using four digits while {month}, {day},
+ * {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional
+ * seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution),
+ * are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone
+ * is required. A proto3 JSON serializer should always use UTC (as indicated by
+ * "Z") when printing the Timestamp type and a proto3 JSON parser should be
+ * able to accept both UTC and other timezones (as indicated by an offset).
+ *
+ * For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past
+ * 01:30 UTC on January 15, 2017.
+ *
+ * In JavaScript, one can convert a Date object to this format using the
+ * standard
+ * [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString)
+ * method. In Python, a standard `datetime.datetime` object can be converted
+ * to this format using
+ * [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with
+ * the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use
+ * the Joda Time's [`ISODateTimeFormat.dateTime()`](
+ * http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D
+ * ) to obtain a formatter capable of generating timestamps in this format.
+ */
+export interface Timestamp {
+ /**
+ * Represents seconds of UTC time since Unix epoch
+ * 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to
+ * 9999-12-31T23:59:59Z inclusive.
+ */
+ seconds: string;
+ /**
+ * Non-negative fractions of a second at nanosecond resolution. Negative
+ * second values with fractions must still have non-negative nanos values
+ * that count forward in time. Must be from 0 to 999,999,999
+ * inclusive.
+ */
+ nanos: number;
+}
+export declare const Timestamp: {
+ fromJSON(object: any): Timestamp;
+ toJSON(message: Timestamp): unknown;
+};
diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
new file mode 100644
index 0000000000..159135fe87
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
@@ -0,0 +1,24 @@
+"use strict";
+/* eslint-disable */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Timestamp = void 0;
+function createBaseTimestamp() {
+ return { seconds: "0", nanos: 0 };
+}
+exports.Timestamp = {
+ fromJSON(object) {
+ return {
+ seconds: isSet(object.seconds) ? String(object.seconds) : "0",
+ nanos: isSet(object.nanos) ? Number(object.nanos) : 0,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.seconds !== undefined && (obj.seconds = message.seconds);
+ message.nanos !== undefined && (obj.nanos = Math.round(message.nanos));
+ return obj;
+ },
+};
+function isSet(value) {
+ return value !== null && value !== undefined;
+}
diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.d.ts b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.d.ts
new file mode 100644
index 0000000000..51f748f459
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.d.ts
@@ -0,0 +1,72 @@
+import { Envelope } from "./envelope";
+import { MessageSignature, PublicKeyIdentifier, RFC3161SignedTimestamp, X509CertificateChain } from "./sigstore_common";
+import { TransparencyLogEntry } from "./sigstore_rekor";
+/**
+ * Various timestamped counter signatures over the artifacts signature.
+ * Currently only RFC3161 signatures are provided. More formats may be added
+ * in the future.
+ */
+export interface TimestampVerificationData {
+ /**
+ * A list of RFC3161 signed timestamps provided by the user.
+ * This can be used when the entry has not been stored on a
+ * transparency log, or in conjunction for a stronger trust model.
+ * Clients MUST verify the hashed message in the message imprint
+ * against the signature in the bundle.
+ */
+ rfc3161Timestamps: RFC3161SignedTimestamp[];
+}
+/**
+ * VerificationMaterial captures details on the materials used to verify
+ * signatures.
+ */
+export interface VerificationMaterial {
+ content?: {
+ $case: "publicKey";
+ publicKey: PublicKeyIdentifier;
+ } | {
+ $case: "x509CertificateChain";
+ x509CertificateChain: X509CertificateChain;
+ };
+ /**
+ * This is the inclusion promise and/or proof, where
+ * the timestamp is coming from the transparency log.
+ */
+ tlogEntries: TransparencyLogEntry[];
+ /** Timestamp verification data, over the artifact's signature. */
+ timestampVerificationData: TimestampVerificationData | undefined;
+}
+export interface Bundle {
+ /**
+ * MUST be application/vnd.dev.sigstore.bundle+json;version=0.1
+ * when encoded as JSON.
+ */
+ mediaType: string;
+ /**
+ * When a signer is identified by a X.509 certificate, a verifier MUST
+ * verify that the signature was computed at the time the certificate
+ * was valid as described in the Sigstore client spec: "Verification
+ * using a Bundle".
+ * <https://docs.google.com/document/d/1kbhK2qyPPk8SLavHzYSDM8-Ueul9_oxIMVFuWMWKz0E/edit#heading=h.x8bduppe89ln>
+ */
+ verificationMaterial: VerificationMaterial | undefined;
+ content?: {
+ $case: "messageSignature";
+ messageSignature: MessageSignature;
+ } | {
+ $case: "dsseEnvelope";
+ dsseEnvelope: Envelope;
+ };
+}
+export declare const TimestampVerificationData: {
+ fromJSON(object: any): TimestampVerificationData;
+ toJSON(message: TimestampVerificationData): unknown;
+};
+export declare const VerificationMaterial: {
+ fromJSON(object: any): VerificationMaterial;
+ toJSON(message: VerificationMaterial): unknown;
+};
+export declare const Bundle: {
+ fromJSON(object: any): Bundle;
+ toJSON(message: Bundle): unknown;
+};
diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
new file mode 100644
index 0000000000..1ef3e1b335
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
@@ -0,0 +1,106 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Bundle = exports.VerificationMaterial = exports.TimestampVerificationData = void 0;
+/* eslint-disable */
+const envelope_1 = require("./envelope");
+const sigstore_common_1 = require("./sigstore_common");
+const sigstore_rekor_1 = require("./sigstore_rekor");
+function createBaseTimestampVerificationData() {
+ return { rfc3161Timestamps: [] };
+}
+exports.TimestampVerificationData = {
+ fromJSON(object) {
+ return {
+ rfc3161Timestamps: Array.isArray(object?.rfc3161Timestamps)
+ ? object.rfc3161Timestamps.map((e) => sigstore_common_1.RFC3161SignedTimestamp.fromJSON(e))
+ : [],
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ if (message.rfc3161Timestamps) {
+ obj.rfc3161Timestamps = message.rfc3161Timestamps.map((e) => e ? sigstore_common_1.RFC3161SignedTimestamp.toJSON(e) : undefined);
+ }
+ else {
+ obj.rfc3161Timestamps = [];
+ }
+ return obj;
+ },
+};
+function createBaseVerificationMaterial() {
+ return { content: undefined, tlogEntries: [], timestampVerificationData: undefined };
+}
+exports.VerificationMaterial = {
+ fromJSON(object) {
+ return {
+ content: isSet(object.publicKey)
+ ? { $case: "publicKey", publicKey: sigstore_common_1.PublicKeyIdentifier.fromJSON(object.publicKey) }
+ : isSet(object.x509CertificateChain)
+ ? {
+ $case: "x509CertificateChain",
+ x509CertificateChain: sigstore_common_1.X509CertificateChain.fromJSON(object.x509CertificateChain),
+ }
+ : undefined,
+ tlogEntries: Array.isArray(object?.tlogEntries)
+ ? object.tlogEntries.map((e) => sigstore_rekor_1.TransparencyLogEntry.fromJSON(e))
+ : [],
+ timestampVerificationData: isSet(object.timestampVerificationData)
+ ? exports.TimestampVerificationData.fromJSON(object.timestampVerificationData)
+ : undefined,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.content?.$case === "publicKey" &&
+ (obj.publicKey = message.content?.publicKey ? sigstore_common_1.PublicKeyIdentifier.toJSON(message.content?.publicKey) : undefined);
+ message.content?.$case === "x509CertificateChain" &&
+ (obj.x509CertificateChain = message.content?.x509CertificateChain
+ ? sigstore_common_1.X509CertificateChain.toJSON(message.content?.x509CertificateChain)
+ : undefined);
+ if (message.tlogEntries) {
+ obj.tlogEntries = message.tlogEntries.map((e) => e ? sigstore_rekor_1.TransparencyLogEntry.toJSON(e) : undefined);
+ }
+ else {
+ obj.tlogEntries = [];
+ }
+ message.timestampVerificationData !== undefined &&
+ (obj.timestampVerificationData = message.timestampVerificationData
+ ? exports.TimestampVerificationData.toJSON(message.timestampVerificationData)
+ : undefined);
+ return obj;
+ },
+};
+function createBaseBundle() {
+ return { mediaType: "", verificationMaterial: undefined, content: undefined };
+}
+exports.Bundle = {
+ fromJSON(object) {
+ return {
+ mediaType: isSet(object.mediaType) ? String(object.mediaType) : "",
+ verificationMaterial: isSet(object.verificationMaterial)
+ ? exports.VerificationMaterial.fromJSON(object.verificationMaterial)
+ : undefined,
+ content: isSet(object.messageSignature)
+ ? { $case: "messageSignature", messageSignature: sigstore_common_1.MessageSignature.fromJSON(object.messageSignature) }
+ : isSet(object.dsseEnvelope)
+ ? { $case: "dsseEnvelope", dsseEnvelope: envelope_1.Envelope.fromJSON(object.dsseEnvelope) }
+ : undefined,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.mediaType !== undefined && (obj.mediaType = message.mediaType);
+ message.verificationMaterial !== undefined && (obj.verificationMaterial = message.verificationMaterial
+ ? exports.VerificationMaterial.toJSON(message.verificationMaterial)
+ : undefined);
+ message.content?.$case === "messageSignature" && (obj.messageSignature = message.content?.messageSignature
+ ? sigstore_common_1.MessageSignature.toJSON(message.content?.messageSignature)
+ : undefined);
+ message.content?.$case === "dsseEnvelope" &&
+ (obj.dsseEnvelope = message.content?.dsseEnvelope ? envelope_1.Envelope.toJSON(message.content?.dsseEnvelope) : undefined);
+ return obj;
+ },
+};
+function isSet(value) {
+ return value !== null && value !== undefined;
+}
diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.d.ts b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.d.ts
new file mode 100644
index 0000000000..0d8c2d5ebd
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.d.ts
@@ -0,0 +1,228 @@
+/// <reference types="node" />
+/**
+ * Only a subset of the secure hash standard algorithms are supported.
+ * See <https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.180-4.pdf> for more
+ * details.
+ * UNSPECIFIED SHOULD not be used, primary reason for inclusion is to force
+ * any proto JSON serialization to emit the used hash algorithm, as default
+ * option is to *omit* the default value of an enum (which is the first
+ * value, represented by '0').
+ */
+export declare enum HashAlgorithm {
+ HASH_ALGORITHM_UNSPECIFIED = 0,
+ SHA2_256 = 1
+}
+export declare function hashAlgorithmFromJSON(object: any): HashAlgorithm;
+export declare function hashAlgorithmToJSON(object: HashAlgorithm): string;
+/**
+ * Details of a specific public key, capturing the key encoding method,
+ * and signature algorithm.
+ * To avoid the possibility of contradicting formats such as PKCS1 with
+ * ED25519 the valid permutations are listed as a linear set instead of a
+ * cartesian set (i.e. one combined variable instead of two, one for encoding
+ * and one for the signature algorithm).
+ */
+export declare enum PublicKeyDetails {
+ PUBLIC_KEY_DETAILS_UNSPECIFIED = 0,
+ /** PKCS1_RSA_PKCS1V5 - RSA */
+ PKCS1_RSA_PKCS1V5 = 1,
+ /** PKCS1_RSA_PSS - See RFC8017 */
+ PKCS1_RSA_PSS = 2,
+ PKIX_RSA_PKCS1V5 = 3,
+ PKIX_RSA_PSS = 4,
+ /** PKIX_ECDSA_P256_SHA_256 - ECDSA */
+ PKIX_ECDSA_P256_SHA_256 = 5,
+ /** PKIX_ECDSA_P256_HMAC_SHA_256 - See RFC6979 */
+ PKIX_ECDSA_P256_HMAC_SHA_256 = 6,
+ /** PKIX_ED25519 - Ed 25519 */
+ PKIX_ED25519 = 7
+}
+export declare function publicKeyDetailsFromJSON(object: any): PublicKeyDetails;
+export declare function publicKeyDetailsToJSON(object: PublicKeyDetails): string;
+export declare enum SubjectAlternativeNameType {
+ SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED = 0,
+ EMAIL = 1,
+ URI = 2,
+ /**
+ * OTHER_NAME - OID 1.3.6.1.4.1.57264.1.7
+ * See https://github.com/sigstore/fulcio/blob/main/docs/oid-info.md#1361415726417--othername-san
+ * for more details.
+ */
+ OTHER_NAME = 3
+}
+export declare function subjectAlternativeNameTypeFromJSON(object: any): SubjectAlternativeNameType;
+export declare function subjectAlternativeNameTypeToJSON(object: SubjectAlternativeNameType): string;
+/**
+ * HashOutput captures a digest of a 'message' (generic octet sequence)
+ * and the corresponding hash algorithm used.
+ */
+export interface HashOutput {
+ algorithm: HashAlgorithm;
+ /**
+ * This is the raw octets of the message digest as computed by
+ * the hash algorithm.
+ */
+ digest: Buffer;
+}
+/** MessageSignature stores the computed signature over a message. */
+export interface MessageSignature {
+ /** Message digest can be used to identify the artifact. */
+ messageDigest: HashOutput | undefined;
+ /**
+ * The raw bytes as returned from the signature algorithm.
+ * The signature algorithm (and so the format of the signature bytes)
+ * are determined by the contents of the 'verification_material',
+ * either a key-pair or a certificate. If using a certificate, the
+ * certificate contains the required information on the signature
+ * algorithm.
+ * When using a key pair, the algorithm MUST be part of the public
+ * key, which MUST be communicated out-of-band.
+ */
+ signature: Buffer;
+}
+/** LogId captures the identity of a transparency log. */
+export interface LogId {
+ /**
+ * The unique id of the log, represented as the SHA-256 hash
+ * of the log's public key, computed over the DER encoding.
+ * <https://www.rfc-editor.org/rfc/rfc6962#section-3.2>
+ */
+ keyId: Buffer;
+}
+/** This message holds a RFC 3161 timestamp. */
+export interface RFC3161SignedTimestamp {
+ /**
+ * Signed timestamp is the DER encoded TimeStampResponse.
+ * See https://www.rfc-editor.org/rfc/rfc3161.html#section-2.4.2
+ */
+ signedTimestamp: Buffer;
+}
+export interface PublicKey {
+ /**
+ * DER-encoded public key, encoding method is specified by the
+ * key_details attribute.
+ */
+ rawBytes?: Buffer | undefined;
+ /** Key encoding and signature algorithm to use for this key. */
+ keyDetails: PublicKeyDetails;
+ /** Optional validity period for this key. */
+ validFor?: TimeRange | undefined;
+}
+/**
+ * PublicKeyIdentifier can be used to identify an (out of band) delivered
+ * key, to verify a signature.
+ */
+export interface PublicKeyIdentifier {
+ /**
+ * Optional unauthenticated hint on which key to use.
+ * The format of the hint must be agreed upon out of band by the
+ * signer and the verifiers, and so is not subject to this
+ * specification.
+ * Example use-case is to specify the public key to use, from a
+ * trusted key-ring.
+ * Implementors are RECOMMENDED to derive the value from the public
+ * key as described in RFC 6962.
+ * See: <https://www.rfc-editor.org/rfc/rfc6962#section-3.2>
+ */
+ hint: string;
+}
+/** An ASN.1 OBJECT IDENTIFIER */
+export interface ObjectIdentifier {
+ id: number[];
+}
+/** An OID and the corresponding (byte) value. */
+export interface ObjectIdentifierValuePair {
+ oid: ObjectIdentifier | undefined;
+ value: Buffer;
+}
+export interface DistinguishedName {
+ organization: string;
+ commonName: string;
+}
+export interface X509Certificate {
+ /** DER-encoded X.509 certificate. */
+ rawBytes: Buffer;
+}
+export interface SubjectAlternativeName {
+ type: SubjectAlternativeNameType;
+ identity?: {
+ $case: "regexp";
+ regexp: string;
+ } | {
+ $case: "value";
+ value: string;
+ };
+}
+/** A chain of X.509 certificates. */
+export interface X509CertificateChain {
+ /**
+ * The chain of certificates, with indices 0 to n.
+ * The first certificate in the array must be the leaf
+ * certificate used for signing. Any intermediate certificates
+ * must be stored as offset 1 to n-1, and the root certificate at
+ * position n.
+ */
+ certificates: X509Certificate[];
+}
+/**
+ * The time range is half-open and does not include the end timestamp,
+ * i.e. [start, end).
+ * End is optional to be able to capture a period that has started but
+ * has no known end.
+ */
+export interface TimeRange {
+ start: Date | undefined;
+ end?: Date | undefined;
+}
+export declare const HashOutput: {
+ fromJSON(object: any): HashOutput;
+ toJSON(message: HashOutput): unknown;
+};
+export declare const MessageSignature: {
+ fromJSON(object: any): MessageSignature;
+ toJSON(message: MessageSignature): unknown;
+};
+export declare const LogId: {
+ fromJSON(object: any): LogId;
+ toJSON(message: LogId): unknown;
+};
+export declare const RFC3161SignedTimestamp: {
+ fromJSON(object: any): RFC3161SignedTimestamp;
+ toJSON(message: RFC3161SignedTimestamp): unknown;
+};
+export declare const PublicKey: {
+ fromJSON(object: any): PublicKey;
+ toJSON(message: PublicKey): unknown;
+};
+export declare const PublicKeyIdentifier: {
+ fromJSON(object: any): PublicKeyIdentifier;
+ toJSON(message: PublicKeyIdentifier): unknown;
+};
+export declare const ObjectIdentifier: {
+ fromJSON(object: any): ObjectIdentifier;
+ toJSON(message: ObjectIdentifier): unknown;
+};
+export declare const ObjectIdentifierValuePair: {
+ fromJSON(object: any): ObjectIdentifierValuePair;
+ toJSON(message: ObjectIdentifierValuePair): unknown;
+};
+export declare const DistinguishedName: {
+ fromJSON(object: any): DistinguishedName;
+ toJSON(message: DistinguishedName): unknown;
+};
+export declare const X509Certificate: {
+ fromJSON(object: any): X509Certificate;
+ toJSON(message: X509Certificate): unknown;
+};
+export declare const SubjectAlternativeName: {
+ fromJSON(object: any): SubjectAlternativeName;
+ toJSON(message: SubjectAlternativeName): unknown;
+};
+export declare const X509CertificateChain: {
+ fromJSON(object: any): X509CertificateChain;
+ toJSON(message: X509CertificateChain): unknown;
+};
+export declare const TimeRange: {
+ fromJSON(object: any): TimeRange;
+ toJSON(message: TimeRange): unknown;
+};
diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
new file mode 100644
index 0000000000..63ace8db58
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
@@ -0,0 +1,457 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TimeRange = exports.X509CertificateChain = exports.SubjectAlternativeName = exports.X509Certificate = exports.DistinguishedName = exports.ObjectIdentifierValuePair = exports.ObjectIdentifier = exports.PublicKeyIdentifier = exports.PublicKey = exports.RFC3161SignedTimestamp = exports.LogId = exports.MessageSignature = exports.HashOutput = exports.subjectAlternativeNameTypeToJSON = exports.subjectAlternativeNameTypeFromJSON = exports.SubjectAlternativeNameType = exports.publicKeyDetailsToJSON = exports.publicKeyDetailsFromJSON = exports.PublicKeyDetails = exports.hashAlgorithmToJSON = exports.hashAlgorithmFromJSON = exports.HashAlgorithm = void 0;
+/* eslint-disable */
+const timestamp_1 = require("./google/protobuf/timestamp");
+/**
+ * Only a subset of the secure hash standard algorithms are supported.
+ * See <https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.180-4.pdf> for more
+ * details.
+ * UNSPECIFIED SHOULD not be used, primary reason for inclusion is to force
+ * any proto JSON serialization to emit the used hash algorithm, as default
+ * option is to *omit* the default value of an enum (which is the first
+ * value, represented by '0'.
+ */
+var HashAlgorithm;
+(function (HashAlgorithm) {
+ HashAlgorithm[HashAlgorithm["HASH_ALGORITHM_UNSPECIFIED"] = 0] = "HASH_ALGORITHM_UNSPECIFIED";
+ HashAlgorithm[HashAlgorithm["SHA2_256"] = 1] = "SHA2_256";
+})(HashAlgorithm = exports.HashAlgorithm || (exports.HashAlgorithm = {}));
+function hashAlgorithmFromJSON(object) {
+ switch (object) {
+ case 0:
+ case "HASH_ALGORITHM_UNSPECIFIED":
+ return HashAlgorithm.HASH_ALGORITHM_UNSPECIFIED;
+ case 1:
+ case "SHA2_256":
+ return HashAlgorithm.SHA2_256;
+ default:
+ throw new globalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm");
+ }
+}
+exports.hashAlgorithmFromJSON = hashAlgorithmFromJSON;
+function hashAlgorithmToJSON(object) {
+ switch (object) {
+ case HashAlgorithm.HASH_ALGORITHM_UNSPECIFIED:
+ return "HASH_ALGORITHM_UNSPECIFIED";
+ case HashAlgorithm.SHA2_256:
+ return "SHA2_256";
+ default:
+ throw new globalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm");
+ }
+}
+exports.hashAlgorithmToJSON = hashAlgorithmToJSON;
+/**
+ * Details of a specific public key, capturing the key encoding method,
+ * and signature algorithm.
+ * To avoid the possibility of contradicting formats such as PKCS1 with
+ * ED25519 the valid permutations are listed as a linear set instead of a
+ * cartesian set (i.e. one combined variable instead of two, one for encoding
+ * and one for the signature algorithm).
+ */
+var PublicKeyDetails;
+(function (PublicKeyDetails) {
+ PublicKeyDetails[PublicKeyDetails["PUBLIC_KEY_DETAILS_UNSPECIFIED"] = 0] = "PUBLIC_KEY_DETAILS_UNSPECIFIED";
+ /** PKCS1_RSA_PKCS1V5 - RSA */
+ PublicKeyDetails[PublicKeyDetails["PKCS1_RSA_PKCS1V5"] = 1] = "PKCS1_RSA_PKCS1V5";
+ /** PKCS1_RSA_PSS - See RFC8017 */
+ PublicKeyDetails[PublicKeyDetails["PKCS1_RSA_PSS"] = 2] = "PKCS1_RSA_PSS";
+ PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PKCS1V5"] = 3] = "PKIX_RSA_PKCS1V5";
+ PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PSS"] = 4] = "PKIX_RSA_PSS";
+ /** PKIX_ECDSA_P256_SHA_256 - ECDSA */
+ PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P256_SHA_256"] = 5] = "PKIX_ECDSA_P256_SHA_256";
+ /** PKIX_ECDSA_P256_HMAC_SHA_256 - See RFC6979 */
+ PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P256_HMAC_SHA_256"] = 6] = "PKIX_ECDSA_P256_HMAC_SHA_256";
+ /** PKIX_ED25519 - Ed 25519 */
+ PublicKeyDetails[PublicKeyDetails["PKIX_ED25519"] = 7] = "PKIX_ED25519";
+})(PublicKeyDetails = exports.PublicKeyDetails || (exports.PublicKeyDetails = {}));
+function publicKeyDetailsFromJSON(object) {
+ switch (object) {
+ case 0:
+ case "PUBLIC_KEY_DETAILS_UNSPECIFIED":
+ return PublicKeyDetails.PUBLIC_KEY_DETAILS_UNSPECIFIED;
+ case 1:
+ case "PKCS1_RSA_PKCS1V5":
+ return PublicKeyDetails.PKCS1_RSA_PKCS1V5;
+ case 2:
+ case "PKCS1_RSA_PSS":
+ return PublicKeyDetails.PKCS1_RSA_PSS;
+ case 3:
+ case "PKIX_RSA_PKCS1V5":
+ return PublicKeyDetails.PKIX_RSA_PKCS1V5;
+ case 4:
+ case "PKIX_RSA_PSS":
+ return PublicKeyDetails.PKIX_RSA_PSS;
+ case 5:
+ case "PKIX_ECDSA_P256_SHA_256":
+ return PublicKeyDetails.PKIX_ECDSA_P256_SHA_256;
+ case 6:
+ case "PKIX_ECDSA_P256_HMAC_SHA_256":
+ return PublicKeyDetails.PKIX_ECDSA_P256_HMAC_SHA_256;
+ case 7:
+ case "PKIX_ED25519":
+ return PublicKeyDetails.PKIX_ED25519;
+ default:
+ throw new globalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails");
+ }
+}
+exports.publicKeyDetailsFromJSON = publicKeyDetailsFromJSON;
+function publicKeyDetailsToJSON(object) {
+ switch (object) {
+ case PublicKeyDetails.PUBLIC_KEY_DETAILS_UNSPECIFIED:
+ return "PUBLIC_KEY_DETAILS_UNSPECIFIED";
+ case PublicKeyDetails.PKCS1_RSA_PKCS1V5:
+ return "PKCS1_RSA_PKCS1V5";
+ case PublicKeyDetails.PKCS1_RSA_PSS:
+ return "PKCS1_RSA_PSS";
+ case PublicKeyDetails.PKIX_RSA_PKCS1V5:
+ return "PKIX_RSA_PKCS1V5";
+ case PublicKeyDetails.PKIX_RSA_PSS:
+ return "PKIX_RSA_PSS";
+ case PublicKeyDetails.PKIX_ECDSA_P256_SHA_256:
+ return "PKIX_ECDSA_P256_SHA_256";
+ case PublicKeyDetails.PKIX_ECDSA_P256_HMAC_SHA_256:
+ return "PKIX_ECDSA_P256_HMAC_SHA_256";
+ case PublicKeyDetails.PKIX_ED25519:
+ return "PKIX_ED25519";
+ default:
+ throw new globalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails");
+ }
+}
+exports.publicKeyDetailsToJSON = publicKeyDetailsToJSON;
+var SubjectAlternativeNameType;
+(function (SubjectAlternativeNameType) {
+ SubjectAlternativeNameType[SubjectAlternativeNameType["SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED"] = 0] = "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED";
+ SubjectAlternativeNameType[SubjectAlternativeNameType["EMAIL"] = 1] = "EMAIL";
+ SubjectAlternativeNameType[SubjectAlternativeNameType["URI"] = 2] = "URI";
+ /**
+ * OTHER_NAME - OID 1.3.6.1.4.1.57264.1.7
+ * See https://github.com/sigstore/fulcio/blob/main/docs/oid-info.md#1361415726417--othername-san
+ * for more details.
+ */
+ SubjectAlternativeNameType[SubjectAlternativeNameType["OTHER_NAME"] = 3] = "OTHER_NAME";
+})(SubjectAlternativeNameType = exports.SubjectAlternativeNameType || (exports.SubjectAlternativeNameType = {}));
+function subjectAlternativeNameTypeFromJSON(object) {
+ switch (object) {
+ case 0:
+ case "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED":
+ return SubjectAlternativeNameType.SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED;
+ case 1:
+ case "EMAIL":
+ return SubjectAlternativeNameType.EMAIL;
+ case 2:
+ case "URI":
+ return SubjectAlternativeNameType.URI;
+ case 3:
+ case "OTHER_NAME":
+ return SubjectAlternativeNameType.OTHER_NAME;
+ default:
+ throw new globalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType");
+ }
+}
+exports.subjectAlternativeNameTypeFromJSON = subjectAlternativeNameTypeFromJSON;
+function subjectAlternativeNameTypeToJSON(object) {
+ switch (object) {
+ case SubjectAlternativeNameType.SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED:
+ return "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED";
+ case SubjectAlternativeNameType.EMAIL:
+ return "EMAIL";
+ case SubjectAlternativeNameType.URI:
+ return "URI";
+ case SubjectAlternativeNameType.OTHER_NAME:
+ return "OTHER_NAME";
+ default:
+ throw new globalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType");
+ }
+}
+exports.subjectAlternativeNameTypeToJSON = subjectAlternativeNameTypeToJSON;
+function createBaseHashOutput() {
+ return { algorithm: 0, digest: Buffer.alloc(0) };
+}
+exports.HashOutput = {
+ fromJSON(object) {
+ return {
+ algorithm: isSet(object.algorithm) ? hashAlgorithmFromJSON(object.algorithm) : 0,
+ digest: isSet(object.digest) ? Buffer.from(bytesFromBase64(object.digest)) : Buffer.alloc(0),
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.algorithm !== undefined && (obj.algorithm = hashAlgorithmToJSON(message.algorithm));
+ message.digest !== undefined &&
+ (obj.digest = base64FromBytes(message.digest !== undefined ? message.digest : Buffer.alloc(0)));
+ return obj;
+ },
+};
+function createBaseMessageSignature() {
+ return { messageDigest: undefined, signature: Buffer.alloc(0) };
+}
+exports.MessageSignature = {
+ fromJSON(object) {
+ return {
+ messageDigest: isSet(object.messageDigest) ? exports.HashOutput.fromJSON(object.messageDigest) : undefined,
+ signature: isSet(object.signature) ? Buffer.from(bytesFromBase64(object.signature)) : Buffer.alloc(0),
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.messageDigest !== undefined &&
+ (obj.messageDigest = message.messageDigest ? exports.HashOutput.toJSON(message.messageDigest) : undefined);
+ message.signature !== undefined &&
+ (obj.signature = base64FromBytes(message.signature !== undefined ? message.signature : Buffer.alloc(0)));
+ return obj;
+ },
+};
+function createBaseLogId() {
+ return { keyId: Buffer.alloc(0) };
+}
+exports.LogId = {
+ fromJSON(object) {
+ return { keyId: isSet(object.keyId) ? Buffer.from(bytesFromBase64(object.keyId)) : Buffer.alloc(0) };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.keyId !== undefined &&
+ (obj.keyId = base64FromBytes(message.keyId !== undefined ? message.keyId : Buffer.alloc(0)));
+ return obj;
+ },
+};
+function createBaseRFC3161SignedTimestamp() {
+ return { signedTimestamp: Buffer.alloc(0) };
+}
+exports.RFC3161SignedTimestamp = {
+ fromJSON(object) {
+ return {
+ signedTimestamp: isSet(object.signedTimestamp)
+ ? Buffer.from(bytesFromBase64(object.signedTimestamp))
+ : Buffer.alloc(0),
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.signedTimestamp !== undefined &&
+ (obj.signedTimestamp = base64FromBytes(message.signedTimestamp !== undefined ? message.signedTimestamp : Buffer.alloc(0)));
+ return obj;
+ },
+};
+function createBasePublicKey() {
+ return { rawBytes: undefined, keyDetails: 0, validFor: undefined };
+}
+exports.PublicKey = {
+ fromJSON(object) {
+ return {
+ rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : undefined,
+ keyDetails: isSet(object.keyDetails) ? publicKeyDetailsFromJSON(object.keyDetails) : 0,
+ validFor: isSet(object.validFor) ? exports.TimeRange.fromJSON(object.validFor) : undefined,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.rawBytes !== undefined &&
+ (obj.rawBytes = message.rawBytes !== undefined ? base64FromBytes(message.rawBytes) : undefined);
+ message.keyDetails !== undefined && (obj.keyDetails = publicKeyDetailsToJSON(message.keyDetails));
+ message.validFor !== undefined &&
+ (obj.validFor = message.validFor ? exports.TimeRange.toJSON(message.validFor) : undefined);
+ return obj;
+ },
+};
+function createBasePublicKeyIdentifier() {
+ return { hint: "" };
+}
+exports.PublicKeyIdentifier = {
+ fromJSON(object) {
+ return { hint: isSet(object.hint) ? String(object.hint) : "" };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.hint !== undefined && (obj.hint = message.hint);
+ return obj;
+ },
+};
+function createBaseObjectIdentifier() {
+ return { id: [] };
+}
+exports.ObjectIdentifier = {
+ fromJSON(object) {
+ return { id: Array.isArray(object?.id) ? object.id.map((e) => Number(e)) : [] };
+ },
+ toJSON(message) {
+ const obj = {};
+ if (message.id) {
+ obj.id = message.id.map((e) => Math.round(e));
+ }
+ else {
+ obj.id = [];
+ }
+ return obj;
+ },
+};
+function createBaseObjectIdentifierValuePair() {
+ return { oid: undefined, value: Buffer.alloc(0) };
+}
+exports.ObjectIdentifierValuePair = {
+ fromJSON(object) {
+ return {
+ oid: isSet(object.oid) ? exports.ObjectIdentifier.fromJSON(object.oid) : undefined,
+ value: isSet(object.value) ? Buffer.from(bytesFromBase64(object.value)) : Buffer.alloc(0),
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.oid !== undefined && (obj.oid = message.oid ? exports.ObjectIdentifier.toJSON(message.oid) : undefined);
+ message.value !== undefined &&
+ (obj.value = base64FromBytes(message.value !== undefined ? message.value : Buffer.alloc(0)));
+ return obj;
+ },
+};
+function createBaseDistinguishedName() {
+ return { organization: "", commonName: "" };
+}
+exports.DistinguishedName = {
+ fromJSON(object) {
+ return {
+ organization: isSet(object.organization) ? String(object.organization) : "",
+ commonName: isSet(object.commonName) ? String(object.commonName) : "",
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.organization !== undefined && (obj.organization = message.organization);
+ message.commonName !== undefined && (obj.commonName = message.commonName);
+ return obj;
+ },
+};
+function createBaseX509Certificate() {
+ return { rawBytes: Buffer.alloc(0) };
+}
+exports.X509Certificate = {
+ fromJSON(object) {
+ return { rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : Buffer.alloc(0) };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.rawBytes !== undefined &&
+ (obj.rawBytes = base64FromBytes(message.rawBytes !== undefined ? message.rawBytes : Buffer.alloc(0)));
+ return obj;
+ },
+};
+function createBaseSubjectAlternativeName() {
+ return { type: 0, identity: undefined };
+}
+exports.SubjectAlternativeName = {
+ fromJSON(object) {
+ return {
+ type: isSet(object.type) ? subjectAlternativeNameTypeFromJSON(object.type) : 0,
+ identity: isSet(object.regexp)
+ ? { $case: "regexp", regexp: String(object.regexp) }
+ : isSet(object.value)
+ ? { $case: "value", value: String(object.value) }
+ : undefined,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.type !== undefined && (obj.type = subjectAlternativeNameTypeToJSON(message.type));
+ message.identity?.$case === "regexp" && (obj.regexp = message.identity?.regexp);
+ message.identity?.$case === "value" && (obj.value = message.identity?.value);
+ return obj;
+ },
+};
+function createBaseX509CertificateChain() {
+ return { certificates: [] };
+}
+exports.X509CertificateChain = {
+ fromJSON(object) {
+ return {
+ certificates: Array.isArray(object?.certificates)
+ ? object.certificates.map((e) => exports.X509Certificate.fromJSON(e))
+ : [],
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ if (message.certificates) {
+ obj.certificates = message.certificates.map((e) => e ? exports.X509Certificate.toJSON(e) : undefined);
+ }
+ else {
+ obj.certificates = [];
+ }
+ return obj;
+ },
+};
+function createBaseTimeRange() {
+ return { start: undefined, end: undefined };
+}
+exports.TimeRange = {
+ fromJSON(object) {
+ return {
+ start: isSet(object.start) ? fromJsonTimestamp(object.start) : undefined,
+ end: isSet(object.end) ? fromJsonTimestamp(object.end) : undefined,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.start !== undefined && (obj.start = message.start.toISOString());
+ message.end !== undefined && (obj.end = message.end.toISOString());
+ return obj;
+ },
+};
+var globalThis = (() => {
+ if (typeof globalThis !== "undefined") {
+ return globalThis;
+ }
+ if (typeof self !== "undefined") {
+ return self;
+ }
+ if (typeof window !== "undefined") {
+ return window;
+ }
+ if (typeof global !== "undefined") {
+ return global;
+ }
+ throw "Unable to locate global object";
+})();
+function bytesFromBase64(b64) {
+ if (globalThis.Buffer) {
+ return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
+ }
+ else {
+ const bin = globalThis.atob(b64);
+ const arr = new Uint8Array(bin.length);
+ for (let i = 0; i < bin.length; ++i) {
+ arr[i] = bin.charCodeAt(i);
+ }
+ return arr;
+ }
+}
+function base64FromBytes(arr) {
+ if (globalThis.Buffer) {
+ return globalThis.Buffer.from(arr).toString("base64");
+ }
+ else {
+ const bin = [];
+ arr.forEach((byte) => {
+ bin.push(String.fromCharCode(byte));
+ });
+ return globalThis.btoa(bin.join(""));
+ }
+}
+function fromTimestamp(t) {
+ let millis = Number(t.seconds) * 1000;
+ millis += t.nanos / 1000000;
+ return new Date(millis);
+}
+function fromJsonTimestamp(o) {
+ if (o instanceof Date) {
+ return o;
+ }
+ else if (typeof o === "string") {
+ return new Date(o);
+ }
+ else {
+ return fromTimestamp(timestamp_1.Timestamp.fromJSON(o));
+ }
+}
+function isSet(value) {
+ return value !== null && value !== undefined;
+}
diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.d.ts b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.d.ts
new file mode 100644
index 0000000000..74eb82513d
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.d.ts
@@ -0,0 +1,136 @@
+/// <reference types="node" />
+import { LogId } from "./sigstore_common";
+/** KindVersion contains the entry's kind and api version. */
+export interface KindVersion {
+ /**
+ * Kind is the type of entry being stored in the log.
+ * See here for a list: https://github.com/sigstore/rekor/tree/main/pkg/types
+ */
+ kind: string;
+ /** The specific api version of the type. */
+ version: string;
+}
+/**
+ * The checkpoint contains a signature of the tree head (root hash),
+ * size of the tree, the transparency log's unique identifier (log ID),
+ * hostname and the current time.
+ * The result is a string, the format is described here
+ * https://github.com/transparency-dev/formats/blob/main/log/README.md
+ * The details are here https://github.com/sigstore/rekor/blob/a6e58f72b6b18cc06cefe61808efd562b9726330/pkg/util/signed_note.go#L114
+ * The signature has the same format as
+ * InclusionPromise.signed_entry_timestamp. See below for more details.
+ */
+export interface Checkpoint {
+ envelope: string;
+}
+/**
+ * InclusionProof is the proof returned from the transparency log. Can
+ * be used for online verification against the log.
+ */
+export interface InclusionProof {
+ /** The index of the entry in the log. */
+ logIndex: string;
+ /**
+ * The hash digest stored at the root of the merkle tree at the time
+ * the proof was generated.
+ */
+ rootHash: Buffer;
+ /** The size of the merkle tree at the time the proof was generated. */
+ treeSize: string;
+ /**
+ * A list of hashes required to compute the inclusion proof, sorted
+ * in order from leaf to root.
+ * Note that leaf and root hashes are not included.
+ * The root hash is available separately in this message, and the
+ * leaf hash should be calculated by the client.
+ */
+ hashes: Buffer[];
+ /**
+ * Signature of the tree head, as of the time of this proof was
+ * generated. See above info on 'Checkpoint' for more details.
+ */
+ checkpoint: Checkpoint | undefined;
+}
+/**
+ * The inclusion promise is calculated by Rekor. It's calculated as a
+ * signature over a canonical JSON serialization of the persisted entry, the
+ * log ID, log index and the integration timestamp.
+ * See https://github.com/sigstore/rekor/blob/a6e58f72b6b18cc06cefe61808efd562b9726330/pkg/api/entries.go#L54
+ * The format of the signature depends on the transparency log's public key.
+ * If the signature algorithm requires a hash function and/or a signature
+ * scheme (e.g. RSA) those have to be retrieved out-of-band from the log's
+ * operators, together with the public key.
+ * This is used to verify the integration timestamp's value and that the log
+ * has promised to include the entry.
+ */
+export interface InclusionPromise {
+ signedEntryTimestamp: Buffer;
+}
+/**
+ * TransparencyLogEntry captures all the details required from Rekor to
+ * reconstruct an entry, given that the payload is provided via other means.
+ * This type can easily be created from the existing response from Rekor.
+ * Future iterations could rely on Rekor returning the minimal set of
+ * attributes (excluding the payload) that are required for verifying the
+ * inclusion promise. The inclusion promise (called SignedEntryTimestamp in
+ * the response from Rekor) is similar to a Signed Certificate Timestamp
+ * as described here https://www.rfc-editor.org/rfc/rfc9162#name-signed-certificate-timestam.
+ */
+export interface TransparencyLogEntry {
+ /** The index of the entry in the log. */
+ logIndex: string;
+ /** The unique identifier of the log. */
+ logId: LogId | undefined;
+ /**
+ * The kind (type) and version of the object associated with this
+ * entry. These values are required to construct the entry during
+ * verification.
+ */
+ kindVersion: KindVersion | undefined;
+ /** The UNIX timestamp from the log when the entry was persisted. */
+ integratedTime: string;
+ /** The inclusion promise/signed entry timestamp from the log. */
+ inclusionPromise: InclusionPromise | undefined;
+ /**
+ * The inclusion proof can be used for online verification that the
+ * entry was appended to the log, and that the log has not been
+ * altered.
+ */
+ inclusionProof: InclusionProof | undefined;
+ /**
+ * The canonicalized transparency log entry, used to reconstruct
+ * the Signed Entry Timestamp (SET) during verification.
+ * The contents of this field are the same as the `body` field in
+ * a Rekor response, meaning that it does **not** include the "full"
+ * canonicalized form (of log index, ID, etc.) which are
+ * exposed as separate fields. The verifier is responsible for
+ * combining the `canonicalized_body`, `log_index`, `log_id`,
+ * and `integrated_time` into the payload that the SET's signature
+ * is generated over.
+ *
+ * Clients MUST verify that the signature referenced in the
+ * `canonicalized_body` matches the signature provided in the
+ * `Bundle.content`.
+ */
+ canonicalizedBody: Buffer;
+}
+export declare const KindVersion: {
+ fromJSON(object: any): KindVersion;
+ toJSON(message: KindVersion): unknown;
+};
+export declare const Checkpoint: {
+ fromJSON(object: any): Checkpoint;
+ toJSON(message: Checkpoint): unknown;
+};
+export declare const InclusionProof: {
+ fromJSON(object: any): InclusionProof;
+ toJSON(message: InclusionProof): unknown;
+};
+export declare const InclusionPromise: {
+ fromJSON(object: any): InclusionPromise;
+ toJSON(message: InclusionPromise): unknown;
+};
+export declare const TransparencyLogEntry: {
+ fromJSON(object: any): TransparencyLogEntry;
+ toJSON(message: TransparencyLogEntry): unknown;
+};
diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
new file mode 100644
index 0000000000..bffc7700ed
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
@@ -0,0 +1,167 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TransparencyLogEntry = exports.InclusionPromise = exports.InclusionProof = exports.Checkpoint = exports.KindVersion = void 0;
+/* eslint-disable */
+const sigstore_common_1 = require("./sigstore_common");
+function createBaseKindVersion() {
+ return { kind: "", version: "" };
+}
+exports.KindVersion = {
+ fromJSON(object) {
+ return {
+ kind: isSet(object.kind) ? String(object.kind) : "",
+ version: isSet(object.version) ? String(object.version) : "",
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.kind !== undefined && (obj.kind = message.kind);
+ message.version !== undefined && (obj.version = message.version);
+ return obj;
+ },
+};
+function createBaseCheckpoint() {
+ return { envelope: "" };
+}
+exports.Checkpoint = {
+ fromJSON(object) {
+ return { envelope: isSet(object.envelope) ? String(object.envelope) : "" };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.envelope !== undefined && (obj.envelope = message.envelope);
+ return obj;
+ },
+};
+function createBaseInclusionProof() {
+ return { logIndex: "0", rootHash: Buffer.alloc(0), treeSize: "0", hashes: [], checkpoint: undefined };
+}
+exports.InclusionProof = {
+ fromJSON(object) {
+ return {
+ logIndex: isSet(object.logIndex) ? String(object.logIndex) : "0",
+ rootHash: isSet(object.rootHash) ? Buffer.from(bytesFromBase64(object.rootHash)) : Buffer.alloc(0),
+ treeSize: isSet(object.treeSize) ? String(object.treeSize) : "0",
+ hashes: Array.isArray(object?.hashes) ? object.hashes.map((e) => Buffer.from(bytesFromBase64(e))) : [],
+ checkpoint: isSet(object.checkpoint) ? exports.Checkpoint.fromJSON(object.checkpoint) : undefined,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.logIndex !== undefined && (obj.logIndex = message.logIndex);
+ message.rootHash !== undefined &&
+ (obj.rootHash = base64FromBytes(message.rootHash !== undefined ? message.rootHash : Buffer.alloc(0)));
+ message.treeSize !== undefined && (obj.treeSize = message.treeSize);
+ if (message.hashes) {
+ obj.hashes = message.hashes.map((e) => base64FromBytes(e !== undefined ? e : Buffer.alloc(0)));
+ }
+ else {
+ obj.hashes = [];
+ }
+ message.checkpoint !== undefined &&
+ (obj.checkpoint = message.checkpoint ? exports.Checkpoint.toJSON(message.checkpoint) : undefined);
+ return obj;
+ },
+};
+function createBaseInclusionPromise() {
+ return { signedEntryTimestamp: Buffer.alloc(0) };
+}
+exports.InclusionPromise = {
+ fromJSON(object) {
+ return {
+ signedEntryTimestamp: isSet(object.signedEntryTimestamp)
+ ? Buffer.from(bytesFromBase64(object.signedEntryTimestamp))
+ : Buffer.alloc(0),
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.signedEntryTimestamp !== undefined &&
+ (obj.signedEntryTimestamp = base64FromBytes(message.signedEntryTimestamp !== undefined ? message.signedEntryTimestamp : Buffer.alloc(0)));
+ return obj;
+ },
+};
+function createBaseTransparencyLogEntry() {
+ return {
+ logIndex: "0",
+ logId: undefined,
+ kindVersion: undefined,
+ integratedTime: "0",
+ inclusionPromise: undefined,
+ inclusionProof: undefined,
+ canonicalizedBody: Buffer.alloc(0),
+ };
+}
+exports.TransparencyLogEntry = {
+ fromJSON(object) {
+ return {
+ logIndex: isSet(object.logIndex) ? String(object.logIndex) : "0",
+ logId: isSet(object.logId) ? sigstore_common_1.LogId.fromJSON(object.logId) : undefined,
+ kindVersion: isSet(object.kindVersion) ? exports.KindVersion.fromJSON(object.kindVersion) : undefined,
+ integratedTime: isSet(object.integratedTime) ? String(object.integratedTime) : "0",
+ inclusionPromise: isSet(object.inclusionPromise) ? exports.InclusionPromise.fromJSON(object.inclusionPromise) : undefined,
+ inclusionProof: isSet(object.inclusionProof) ? exports.InclusionProof.fromJSON(object.inclusionProof) : undefined,
+ canonicalizedBody: isSet(object.canonicalizedBody)
+ ? Buffer.from(bytesFromBase64(object.canonicalizedBody))
+ : Buffer.alloc(0),
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.logIndex !== undefined && (obj.logIndex = message.logIndex);
+ message.logId !== undefined && (obj.logId = message.logId ? sigstore_common_1.LogId.toJSON(message.logId) : undefined);
+ message.kindVersion !== undefined &&
+ (obj.kindVersion = message.kindVersion ? exports.KindVersion.toJSON(message.kindVersion) : undefined);
+ message.integratedTime !== undefined && (obj.integratedTime = message.integratedTime);
+ message.inclusionPromise !== undefined &&
+ (obj.inclusionPromise = message.inclusionPromise ? exports.InclusionPromise.toJSON(message.inclusionPromise) : undefined);
+ message.inclusionProof !== undefined &&
+ (obj.inclusionProof = message.inclusionProof ? exports.InclusionProof.toJSON(message.inclusionProof) : undefined);
+ message.canonicalizedBody !== undefined &&
+ (obj.canonicalizedBody = base64FromBytes(message.canonicalizedBody !== undefined ? message.canonicalizedBody : Buffer.alloc(0)));
+ return obj;
+ },
+};
+var globalThis = (() => {
+ if (typeof globalThis !== "undefined") {
+ return globalThis;
+ }
+ if (typeof self !== "undefined") {
+ return self;
+ }
+ if (typeof window !== "undefined") {
+ return window;
+ }
+ if (typeof global !== "undefined") {
+ return global;
+ }
+ throw "Unable to locate global object";
+})();
+function bytesFromBase64(b64) {
+ if (globalThis.Buffer) {
+ return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
+ }
+ else {
+ const bin = globalThis.atob(b64);
+ const arr = new Uint8Array(bin.length);
+ for (let i = 0; i < bin.length; ++i) {
+ arr[i] = bin.charCodeAt(i);
+ }
+ return arr;
+ }
+}
+function base64FromBytes(arr) {
+ if (globalThis.Buffer) {
+ return globalThis.Buffer.from(arr).toString("base64");
+ }
+ else {
+ const bin = [];
+ arr.forEach((byte) => {
+ bin.push(String.fromCharCode(byte));
+ });
+ return globalThis.btoa(bin.join(""));
+ }
+}
+function isSet(value) {
+ return value !== null && value !== undefined;
+}
diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.d.ts b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.d.ts
new file mode 100644
index 0000000000..152d08f5c6
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.d.ts
@@ -0,0 +1,89 @@
+import { DistinguishedName, HashAlgorithm, LogId, PublicKey, TimeRange, X509CertificateChain } from "./sigstore_common";
+/**
+ * TransparencyLogInstance describes the immutable parameters from a
+ * transparency log.
+ * See https://www.rfc-editor.org/rfc/rfc9162.html#name-log-parameters
+ * for more details.
+ * The included parameters are the minimal set required to identify a log,
+ * and verify an inclusion promise.
+ */
+export interface TransparencyLogInstance {
+ /** The base URL which can be used to construct URLs for the client. */
+ baseUrl: string;
+ /** The hash algorithm used for the Merkle Tree. */
+ hashAlgorithm: HashAlgorithm;
+ /**
+ * The public key used to verify signatures generated by the log.
+ * This attribute contains the signature algorithm used by the log.
+ */
+ publicKey: PublicKey | undefined;
+ /** The unique identifier for this transparency log. */
+ logId: LogId | undefined;
+}
+/**
+ * CertificateAuthority enlists the information required to identify which
+ * CA to use and perform signature verification.
+ */
+export interface CertificateAuthority {
+ /**
+ * The root certificate MUST be self-signed, and so the subject and
+ * issuer are the same.
+ */
+ subject: DistinguishedName | undefined;
+ /** The URI at which the CA can be accessed. */
+ uri: string;
+ /** The certificate chain for this CA. */
+ certChain: X509CertificateChain | undefined;
+ /**
+ * The time the *entire* chain was valid. This is at max the
+ * longest interval when *all* certificates in the chain were valid,
+ * but it MAY be shorter.
+ */
+ validFor: TimeRange | undefined;
+}
+/**
+ * TrustedRoot describes the client's complete set of trusted entities.
+ * How the TrustedRoot is populated is not specified, but can be a
+ * combination of many sources such as TUF repositories, files on disk etc.
+ *
+ * The TrustedRoot is not meant to be used for any artifact verification, only
+ * to capture the complete/global set of trusted verification materials.
+ * When verifying an artifact, based on the artifact and policies, a selection
+ * of keys/authorities are expected to be extracted and provided to the
+ * verification function. This way the set of keys/authorities can be kept to
+ * a minimal set by the policy to gain better control over what signatures
+ * that are allowed.
+ */
+export interface TrustedRoot {
+ /** MUST be application/vnd.dev.sigstore.trustedroot+json;version=0.1 */
+ mediaType: string;
+ /** A set of trusted Rekor servers. */
+ tlogs: TransparencyLogInstance[];
+ /**
+ * A set of trusted certificate authorities (e.g. Fulcio), and any
+ * intermediate certificates they provide.
+ * If a CA is issuing multiple intermediate certificates, each
+ * combination shall be represented as separate chain. I.e, a single
+ * root cert may appear in multiple chains but with different
+ * intermediate and/or leaf certificates.
+ * The certificates are intended to be used for verifying artifact
+ * signatures.
+ */
+ certificateAuthorities: CertificateAuthority[];
+ /** A set of trusted certificate transparency logs. */
+ ctlogs: TransparencyLogInstance[];
+ /** A set of trusted timestamping authorities. */
+ timestampAuthorities: CertificateAuthority[];
+}
+export declare const TransparencyLogInstance: {
+ fromJSON(object: any): TransparencyLogInstance;
+ toJSON(message: TransparencyLogInstance): unknown;
+};
+export declare const CertificateAuthority: {
+ fromJSON(object: any): CertificateAuthority;
+ toJSON(message: CertificateAuthority): unknown;
+};
+export declare const TrustedRoot: {
+ fromJSON(object: any): TrustedRoot;
+ toJSON(message: TrustedRoot): unknown;
+};
diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
new file mode 100644
index 0000000000..05e566767c
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
@@ -0,0 +1,103 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TrustedRoot = exports.CertificateAuthority = exports.TransparencyLogInstance = void 0;
+/* eslint-disable */
+const sigstore_common_1 = require("./sigstore_common");
+function createBaseTransparencyLogInstance() {
+ return { baseUrl: "", hashAlgorithm: 0, publicKey: undefined, logId: undefined };
+}
+exports.TransparencyLogInstance = {
+ fromJSON(object) {
+ return {
+ baseUrl: isSet(object.baseUrl) ? String(object.baseUrl) : "",
+ hashAlgorithm: isSet(object.hashAlgorithm) ? (0, sigstore_common_1.hashAlgorithmFromJSON)(object.hashAlgorithm) : 0,
+ publicKey: isSet(object.publicKey) ? sigstore_common_1.PublicKey.fromJSON(object.publicKey) : undefined,
+ logId: isSet(object.logId) ? sigstore_common_1.LogId.fromJSON(object.logId) : undefined,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.baseUrl !== undefined && (obj.baseUrl = message.baseUrl);
+ message.hashAlgorithm !== undefined && (obj.hashAlgorithm = (0, sigstore_common_1.hashAlgorithmToJSON)(message.hashAlgorithm));
+ message.publicKey !== undefined &&
+ (obj.publicKey = message.publicKey ? sigstore_common_1.PublicKey.toJSON(message.publicKey) : undefined);
+ message.logId !== undefined && (obj.logId = message.logId ? sigstore_common_1.LogId.toJSON(message.logId) : undefined);
+ return obj;
+ },
+};
+function createBaseCertificateAuthority() {
+ return { subject: undefined, uri: "", certChain: undefined, validFor: undefined };
+}
+exports.CertificateAuthority = {
+ fromJSON(object) {
+ return {
+ subject: isSet(object.subject) ? sigstore_common_1.DistinguishedName.fromJSON(object.subject) : undefined,
+ uri: isSet(object.uri) ? String(object.uri) : "",
+ certChain: isSet(object.certChain) ? sigstore_common_1.X509CertificateChain.fromJSON(object.certChain) : undefined,
+ validFor: isSet(object.validFor) ? sigstore_common_1.TimeRange.fromJSON(object.validFor) : undefined,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.subject !== undefined &&
+ (obj.subject = message.subject ? sigstore_common_1.DistinguishedName.toJSON(message.subject) : undefined);
+ message.uri !== undefined && (obj.uri = message.uri);
+ message.certChain !== undefined &&
+ (obj.certChain = message.certChain ? sigstore_common_1.X509CertificateChain.toJSON(message.certChain) : undefined);
+ message.validFor !== undefined &&
+ (obj.validFor = message.validFor ? sigstore_common_1.TimeRange.toJSON(message.validFor) : undefined);
+ return obj;
+ },
+};
+function createBaseTrustedRoot() {
+ return { mediaType: "", tlogs: [], certificateAuthorities: [], ctlogs: [], timestampAuthorities: [] };
+}
+exports.TrustedRoot = {
+ fromJSON(object) {
+ return {
+ mediaType: isSet(object.mediaType) ? String(object.mediaType) : "",
+ tlogs: Array.isArray(object?.tlogs) ? object.tlogs.map((e) => exports.TransparencyLogInstance.fromJSON(e)) : [],
+ certificateAuthorities: Array.isArray(object?.certificateAuthorities)
+ ? object.certificateAuthorities.map((e) => exports.CertificateAuthority.fromJSON(e))
+ : [],
+ ctlogs: Array.isArray(object?.ctlogs)
+ ? object.ctlogs.map((e) => exports.TransparencyLogInstance.fromJSON(e))
+ : [],
+ timestampAuthorities: Array.isArray(object?.timestampAuthorities)
+ ? object.timestampAuthorities.map((e) => exports.CertificateAuthority.fromJSON(e))
+ : [],
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.mediaType !== undefined && (obj.mediaType = message.mediaType);
+ if (message.tlogs) {
+ obj.tlogs = message.tlogs.map((e) => e ? exports.TransparencyLogInstance.toJSON(e) : undefined);
+ }
+ else {
+ obj.tlogs = [];
+ }
+ if (message.certificateAuthorities) {
+ obj.certificateAuthorities = message.certificateAuthorities.map((e) => e ? exports.CertificateAuthority.toJSON(e) : undefined);
+ }
+ else {
+ obj.certificateAuthorities = [];
+ }
+ if (message.ctlogs) {
+ obj.ctlogs = message.ctlogs.map((e) => e ? exports.TransparencyLogInstance.toJSON(e) : undefined);
+ }
+ else {
+ obj.ctlogs = [];
+ }
+ if (message.timestampAuthorities) {
+ obj.timestampAuthorities = message.timestampAuthorities.map((e) => e ? exports.CertificateAuthority.toJSON(e) : undefined);
+ }
+ else {
+ obj.timestampAuthorities = [];
+ }
+ return obj;
+ },
+};
+function isSet(value) {
+ return value !== null && value !== undefined;
+}
diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.d.ts b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.d.ts
new file mode 100644
index 0000000000..8ee32d8e66
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.d.ts
@@ -0,0 +1,156 @@
+/// <reference types="node" />
+import { Bundle } from "./sigstore_bundle";
+import { ObjectIdentifierValuePair, PublicKey, SubjectAlternativeName } from "./sigstore_common";
+import { TrustedRoot } from "./sigstore_trustroot";
+/** The identity of a X.509 Certificate signer. */
+export interface CertificateIdentity {
+ /** The X.509v3 issuer extension (OID 1.3.6.1.4.1.57264.1.1) */
+ issuer: string;
+ san: SubjectAlternativeName | undefined;
+ /**
+ * An unordered list of OIDs that must be verified.
+ * All OID/values provided in this list MUST exactly match against
+ * the values in the certificate for verification to be successful.
+ */
+ oids: ObjectIdentifierValuePair[];
+}
+export interface CertificateIdentities {
+ identities: CertificateIdentity[];
+}
+export interface PublicKeyIdentities {
+ publicKeys: PublicKey[];
+}
+/**
+ * A light-weight set of options/policies for identifying trusted signers,
+ * used during verification of a single artifact.
+ */
+export interface ArtifactVerificationOptions {
+ signers?: {
+ $case: "certificateIdentities";
+ certificateIdentities: CertificateIdentities;
+ } | {
+ $case: "publicKeys";
+ publicKeys: PublicKeyIdentities;
+ };
+ /**
+ * Optional options for artifact transparency log verification.
+ * If none is provided, the default verification options are:
+ * Threshold: 1
+ * Online verification: false
+ * Disable: false
+ */
+ tlogOptions?: ArtifactVerificationOptions_TlogOptions | undefined;
+ /**
+ * Optional options for certificate transparency log verification.
+ * If none is provided, the default verification options are:
+ * Threshold: 1
+ * Detached SCT: false
+ * Disable: false
+ */
+ ctlogOptions?: ArtifactVerificationOptions_CtlogOptions | undefined;
+ /**
+ * Optional options for certificate signed timestamp verification.
+ * If none is provided, the default verification options are:
+ * Threshold: 1
+ * Disable: false
+ */
+ tsaOptions?: ArtifactVerificationOptions_TimestampAuthorityOptions | undefined;
+}
+export interface ArtifactVerificationOptions_TlogOptions {
+ /** Number of transparency logs the entry must appear on. */
+ threshold: number;
+ /** Perform an online inclusion proof. */
+ performOnlineVerification: boolean;
+ /** Disable verification for transparency logs. */
+ disable: boolean;
+}
+export interface ArtifactVerificationOptions_CtlogOptions {
+ /**
+ * The number of ct transparency logs the certificate must
+ * appear on.
+ */
+ threshold: number;
+ /**
+ * Expect detached SCTs.
+ * This is not supported right now as we can't capture a
+ * detached SCT in the bundle.
+ */
+ detachedSct: boolean;
+ /** Disable ct transparency log verification */
+ disable: boolean;
+}
+export interface ArtifactVerificationOptions_TimestampAuthorityOptions {
+ /** The number of signed timestamps that are expected. */
+ threshold: number;
+ /** Disable signed timestamp verification. */
+ disable: boolean;
+}
+export interface Artifact {
+ data?: {
+ $case: "artifactUri";
+ artifactUri: string;
+ } | {
+ $case: "artifact";
+ artifact: Buffer;
+ };
+}
+/**
+ * Input captures all that is needed to call the bundle verification method,
+ * to verify a single artifact referenced by the bundle.
+ */
+export interface Input {
+ /**
+ * The verification materials provided during a bundle verification.
+ * The running process is usually preloaded with a "global"
+ * dev.sigstore.trustroot.TrustedRoot.v1 instance. Prior to
+ * verifying an artifact (i.e a bundle), and/or based on current
+ * policy, some selection is expected to happen, to filter out the
+ * exact certificate authority to use, which transparency logs are
+ * relevant etc. The result should be captured in the
+ * `artifact_trust_root`.
+ */
+ artifactTrustRoot: TrustedRoot | undefined;
+ artifactVerificationOptions: ArtifactVerificationOptions | undefined;
+ bundle: Bundle | undefined;
+ /**
+ * If the bundle contains a message signature, the artifact must be
+ * provided.
+ */
+ artifact?: Artifact | undefined;
+}
+export declare const CertificateIdentity: {
+ fromJSON(object: any): CertificateIdentity;
+ toJSON(message: CertificateIdentity): unknown;
+};
+export declare const CertificateIdentities: {
+ fromJSON(object: any): CertificateIdentities;
+ toJSON(message: CertificateIdentities): unknown;
+};
+export declare const PublicKeyIdentities: {
+ fromJSON(object: any): PublicKeyIdentities;
+ toJSON(message: PublicKeyIdentities): unknown;
+};
+export declare const ArtifactVerificationOptions: {
+ fromJSON(object: any): ArtifactVerificationOptions;
+ toJSON(message: ArtifactVerificationOptions): unknown;
+};
+export declare const ArtifactVerificationOptions_TlogOptions: {
+ fromJSON(object: any): ArtifactVerificationOptions_TlogOptions;
+ toJSON(message: ArtifactVerificationOptions_TlogOptions): unknown;
+};
+export declare const ArtifactVerificationOptions_CtlogOptions: {
+ fromJSON(object: any): ArtifactVerificationOptions_CtlogOptions;
+ toJSON(message: ArtifactVerificationOptions_CtlogOptions): unknown;
+};
+export declare const ArtifactVerificationOptions_TimestampAuthorityOptions: {
+ fromJSON(object: any): ArtifactVerificationOptions_TimestampAuthorityOptions;
+ toJSON(message: ArtifactVerificationOptions_TimestampAuthorityOptions): unknown;
+};
+export declare const Artifact: {
+ fromJSON(object: any): Artifact;
+ toJSON(message: Artifact): unknown;
+};
+export declare const Input: {
+ fromJSON(object: any): Input;
+ toJSON(message: Input): unknown;
+};
diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
new file mode 100644
index 0000000000..b99a305ba5
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
@@ -0,0 +1,273 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Input = exports.Artifact = exports.ArtifactVerificationOptions_TimestampAuthorityOptions = exports.ArtifactVerificationOptions_CtlogOptions = exports.ArtifactVerificationOptions_TlogOptions = exports.ArtifactVerificationOptions = exports.PublicKeyIdentities = exports.CertificateIdentities = exports.CertificateIdentity = void 0;
+/* eslint-disable */
+const sigstore_bundle_1 = require("./sigstore_bundle");
+const sigstore_common_1 = require("./sigstore_common");
+const sigstore_trustroot_1 = require("./sigstore_trustroot");
+function createBaseCertificateIdentity() {
+ return { issuer: "", san: undefined, oids: [] };
+}
+exports.CertificateIdentity = {
+ fromJSON(object) {
+ return {
+ issuer: isSet(object.issuer) ? String(object.issuer) : "",
+ san: isSet(object.san) ? sigstore_common_1.SubjectAlternativeName.fromJSON(object.san) : undefined,
+ oids: Array.isArray(object?.oids) ? object.oids.map((e) => sigstore_common_1.ObjectIdentifierValuePair.fromJSON(e)) : [],
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.issuer !== undefined && (obj.issuer = message.issuer);
+ message.san !== undefined && (obj.san = message.san ? sigstore_common_1.SubjectAlternativeName.toJSON(message.san) : undefined);
+ if (message.oids) {
+ obj.oids = message.oids.map((e) => e ? sigstore_common_1.ObjectIdentifierValuePair.toJSON(e) : undefined);
+ }
+ else {
+ obj.oids = [];
+ }
+ return obj;
+ },
+};
+function createBaseCertificateIdentities() {
+ return { identities: [] };
+}
+exports.CertificateIdentities = {
+ fromJSON(object) {
+ return {
+ identities: Array.isArray(object?.identities)
+ ? object.identities.map((e) => exports.CertificateIdentity.fromJSON(e))
+ : [],
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ if (message.identities) {
+ obj.identities = message.identities.map((e) => e ? exports.CertificateIdentity.toJSON(e) : undefined);
+ }
+ else {
+ obj.identities = [];
+ }
+ return obj;
+ },
+};
+function createBasePublicKeyIdentities() {
+ return { publicKeys: [] };
+}
+exports.PublicKeyIdentities = {
+ fromJSON(object) {
+ return {
+ publicKeys: Array.isArray(object?.publicKeys) ? object.publicKeys.map((e) => sigstore_common_1.PublicKey.fromJSON(e)) : [],
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ if (message.publicKeys) {
+ obj.publicKeys = message.publicKeys.map((e) => e ? sigstore_common_1.PublicKey.toJSON(e) : undefined);
+ }
+ else {
+ obj.publicKeys = [];
+ }
+ return obj;
+ },
+};
+function createBaseArtifactVerificationOptions() {
+ return { signers: undefined, tlogOptions: undefined, ctlogOptions: undefined, tsaOptions: undefined };
+}
+exports.ArtifactVerificationOptions = {
+ fromJSON(object) {
+ return {
+ signers: isSet(object.certificateIdentities)
+ ? {
+ $case: "certificateIdentities",
+ certificateIdentities: exports.CertificateIdentities.fromJSON(object.certificateIdentities),
+ }
+ : isSet(object.publicKeys)
+ ? { $case: "publicKeys", publicKeys: exports.PublicKeyIdentities.fromJSON(object.publicKeys) }
+ : undefined,
+ tlogOptions: isSet(object.tlogOptions)
+ ? exports.ArtifactVerificationOptions_TlogOptions.fromJSON(object.tlogOptions)
+ : undefined,
+ ctlogOptions: isSet(object.ctlogOptions)
+ ? exports.ArtifactVerificationOptions_CtlogOptions.fromJSON(object.ctlogOptions)
+ : undefined,
+ tsaOptions: isSet(object.tsaOptions)
+ ? exports.ArtifactVerificationOptions_TimestampAuthorityOptions.fromJSON(object.tsaOptions)
+ : undefined,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.signers?.$case === "certificateIdentities" &&
+ (obj.certificateIdentities = message.signers?.certificateIdentities
+ ? exports.CertificateIdentities.toJSON(message.signers?.certificateIdentities)
+ : undefined);
+ message.signers?.$case === "publicKeys" && (obj.publicKeys = message.signers?.publicKeys
+ ? exports.PublicKeyIdentities.toJSON(message.signers?.publicKeys)
+ : undefined);
+ message.tlogOptions !== undefined && (obj.tlogOptions = message.tlogOptions
+ ? exports.ArtifactVerificationOptions_TlogOptions.toJSON(message.tlogOptions)
+ : undefined);
+ message.ctlogOptions !== undefined && (obj.ctlogOptions = message.ctlogOptions
+ ? exports.ArtifactVerificationOptions_CtlogOptions.toJSON(message.ctlogOptions)
+ : undefined);
+ message.tsaOptions !== undefined && (obj.tsaOptions = message.tsaOptions
+ ? exports.ArtifactVerificationOptions_TimestampAuthorityOptions.toJSON(message.tsaOptions)
+ : undefined);
+ return obj;
+ },
+};
+function createBaseArtifactVerificationOptions_TlogOptions() {
+ return { threshold: 0, performOnlineVerification: false, disable: false };
+}
+exports.ArtifactVerificationOptions_TlogOptions = {
+ fromJSON(object) {
+ return {
+ threshold: isSet(object.threshold) ? Number(object.threshold) : 0,
+ performOnlineVerification: isSet(object.performOnlineVerification)
+ ? Boolean(object.performOnlineVerification)
+ : false,
+ disable: isSet(object.disable) ? Boolean(object.disable) : false,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.threshold !== undefined && (obj.threshold = Math.round(message.threshold));
+ message.performOnlineVerification !== undefined &&
+ (obj.performOnlineVerification = message.performOnlineVerification);
+ message.disable !== undefined && (obj.disable = message.disable);
+ return obj;
+ },
+};
+function createBaseArtifactVerificationOptions_CtlogOptions() {
+ return { threshold: 0, detachedSct: false, disable: false };
+}
+exports.ArtifactVerificationOptions_CtlogOptions = {
+ fromJSON(object) {
+ return {
+ threshold: isSet(object.threshold) ? Number(object.threshold) : 0,
+ detachedSct: isSet(object.detachedSct) ? Boolean(object.detachedSct) : false,
+ disable: isSet(object.disable) ? Boolean(object.disable) : false,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.threshold !== undefined && (obj.threshold = Math.round(message.threshold));
+ message.detachedSct !== undefined && (obj.detachedSct = message.detachedSct);
+ message.disable !== undefined && (obj.disable = message.disable);
+ return obj;
+ },
+};
+function createBaseArtifactVerificationOptions_TimestampAuthorityOptions() {
+ return { threshold: 0, disable: false };
+}
+exports.ArtifactVerificationOptions_TimestampAuthorityOptions = {
+ fromJSON(object) {
+ return {
+ threshold: isSet(object.threshold) ? Number(object.threshold) : 0,
+ disable: isSet(object.disable) ? Boolean(object.disable) : false,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.threshold !== undefined && (obj.threshold = Math.round(message.threshold));
+ message.disable !== undefined && (obj.disable = message.disable);
+ return obj;
+ },
+};
+function createBaseArtifact() {
+ return { data: undefined };
+}
+exports.Artifact = {
+ fromJSON(object) {
+ return {
+ data: isSet(object.artifactUri)
+ ? { $case: "artifactUri", artifactUri: String(object.artifactUri) }
+ : isSet(object.artifact)
+ ? { $case: "artifact", artifact: Buffer.from(bytesFromBase64(object.artifact)) }
+ : undefined,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.data?.$case === "artifactUri" && (obj.artifactUri = message.data?.artifactUri);
+ message.data?.$case === "artifact" &&
+ (obj.artifact = message.data?.artifact !== undefined ? base64FromBytes(message.data?.artifact) : undefined);
+ return obj;
+ },
+};
+function createBaseInput() {
+ return {
+ artifactTrustRoot: undefined,
+ artifactVerificationOptions: undefined,
+ bundle: undefined,
+ artifact: undefined,
+ };
+}
+exports.Input = {
+ fromJSON(object) {
+ return {
+ artifactTrustRoot: isSet(object.artifactTrustRoot) ? sigstore_trustroot_1.TrustedRoot.fromJSON(object.artifactTrustRoot) : undefined,
+ artifactVerificationOptions: isSet(object.artifactVerificationOptions)
+ ? exports.ArtifactVerificationOptions.fromJSON(object.artifactVerificationOptions)
+ : undefined,
+ bundle: isSet(object.bundle) ? sigstore_bundle_1.Bundle.fromJSON(object.bundle) : undefined,
+ artifact: isSet(object.artifact) ? exports.Artifact.fromJSON(object.artifact) : undefined,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.artifactTrustRoot !== undefined &&
+ (obj.artifactTrustRoot = message.artifactTrustRoot ? sigstore_trustroot_1.TrustedRoot.toJSON(message.artifactTrustRoot) : undefined);
+ message.artifactVerificationOptions !== undefined &&
+ (obj.artifactVerificationOptions = message.artifactVerificationOptions
+ ? exports.ArtifactVerificationOptions.toJSON(message.artifactVerificationOptions)
+ : undefined);
+ message.bundle !== undefined && (obj.bundle = message.bundle ? sigstore_bundle_1.Bundle.toJSON(message.bundle) : undefined);
+ message.artifact !== undefined && (obj.artifact = message.artifact ? exports.Artifact.toJSON(message.artifact) : undefined);
+ return obj;
+ },
+};
+var globalThis = (() => {
+ if (typeof globalThis !== "undefined") {
+ return globalThis;
+ }
+ if (typeof self !== "undefined") {
+ return self;
+ }
+ if (typeof window !== "undefined") {
+ return window;
+ }
+ if (typeof global !== "undefined") {
+ return global;
+ }
+ throw "Unable to locate global object";
+})();
+function bytesFromBase64(b64) {
+ if (globalThis.Buffer) {
+ return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
+ }
+ else {
+ const bin = globalThis.atob(b64);
+ const arr = new Uint8Array(bin.length);
+ for (let i = 0; i < bin.length; ++i) {
+ arr[i] = bin.charCodeAt(i);
+ }
+ return arr;
+ }
+}
+function base64FromBytes(arr) {
+ if (globalThis.Buffer) {
+ return globalThis.Buffer.from(arr).toString("base64");
+ }
+ else {
+ const bin = [];
+ arr.forEach((byte) => {
+ bin.push(String.fromCharCode(byte));
+ });
+ return globalThis.btoa(bin.join(""));
+ }
+}
+function isSet(value) {
+ return value !== null && value !== undefined;
+}
diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/index.d.ts b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/index.d.ts
new file mode 100644
index 0000000000..f87f0aba29
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/index.d.ts
@@ -0,0 +1,6 @@
+export * from './__generated__/envelope';
+export * from './__generated__/sigstore_bundle';
+export * from './__generated__/sigstore_common';
+export * from './__generated__/sigstore_rekor';
+export * from './__generated__/sigstore_trustroot';
+export * from './__generated__/sigstore_verification';
diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/index.js b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/index.js
new file mode 100644
index 0000000000..eafb768c48
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/index.js
@@ -0,0 +1,37 @@
+"use strict";
// TypeScript interop helper: mirror property `k` of module object `m` onto
// `o` (optionally under the alias `k2`). Reuses an instance installed on
// `this` (module.exports) if one exists, so the helper is shared.
var __createBinding = (this && this.__createBinding) || (Object.create
    ? function (o, m, k, k2) {
        var alias = k2 === undefined ? k : k2;
        var desc = Object.getOwnPropertyDescriptor(m, k);
        // Re-export through a live getter unless the source property is an
        // accessor on a real ES module (then its descriptor is reused as-is).
        if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
            desc = { enumerable: true, get: function () { return m[k]; } };
        }
        Object.defineProperty(o, alias, desc);
    }
    : function (o, m, k, k2) {
        // Legacy engines without Object.create: plain (non-live) copy.
        var alias = k2 === undefined ? k : k2;
        o[alias] = m[k];
    });
// TypeScript interop helper for `export * from`: re-export every key of `m`
// onto `exports`, skipping "default" and any name `exports` already owns.
var __exportStar = (this && this.__exportStar) || function (m, exports) {
    for (var p in m) {
        if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) {
            __createBinding(exports, m, p);
        }
    }
};
+Object.defineProperty(exports, "__esModule", { value: true });
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+__exportStar(require("./__generated__/envelope"), exports);
+__exportStar(require("./__generated__/sigstore_bundle"), exports);
+__exportStar(require("./__generated__/sigstore_common"), exports);
+__exportStar(require("./__generated__/sigstore_rekor"), exports);
+__exportStar(require("./__generated__/sigstore_trustroot"), exports);
+__exportStar(require("./__generated__/sigstore_verification"), exports);
diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/package.json b/deps/npm/node_modules/@sigstore/protobuf-specs/package.json
new file mode 100644
index 0000000000..7cb4aa9c53
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/protobuf-specs/package.json
@@ -0,0 +1,31 @@
+{
+ "name": "@sigstore/protobuf-specs",
+ "version": "0.1.0",
+ "description": "code-signing for npm packages",
+ "main": "dist/index.js",
+ "types": "dist/index.d.ts",
+ "scripts": {
+ "build": "tsc"
+ },
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/sigstore/protobuf-specs.git"
+ },
+ "files": [
+ "dist"
+ ],
+ "author": "bdehamer@github.com",
+ "license": "Apache-2.0",
+ "bugs": {
+ "url": "https://github.com/sigstore/protobuf-specs/issues"
+ },
+ "homepage": "https://github.com/sigstore/protobuf-specs#readme",
+ "devDependencies": {
+ "@tsconfig/node14": "^1.0.3",
+ "@types/node": "^18.14.0",
+ "typescript": "^4.9.5"
+ },
+ "engines": {
+ "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+ }
+}