From b5cca09d1f05141dc109ace01cef31e1fdb4c848 Mon Sep 17 00:00:00 2001 From: Kent Rancourt Date: Wed, 20 Dec 2023 13:07:01 -0500 Subject: [PATCH] feat(verification): add promotion verification mechanisms based on rollouts analysis templates (#1259) Signed-off-by: Kent Signed-off-by: Kent Rancourt --- Makefile | 41 +- api/v1alpha1/labels.go | 6 +- api/v1alpha1/stage_types.go | 83 ++ api/v1alpha1/types.proto | 33 + api/v1alpha1/zz_generated.deepcopy.go | 130 +++ charts/kargo/crds/kargo.akuity.io_stages.yaml | 135 +++ .../templates/controller/cluster-roles.yaml | 17 + cmd/controlplane/controller.go | 112 +- internal/api/types/v1alpha1/types.go | 164 ++- internal/cli/cmd/get/stages.go | 2 + internal/controller/analysis/analysis_runs.go | 149 +++ .../controller/analysis/analysis_runs_test.go | 59 ++ internal/controller/labels.go | 8 +- internal/controller/labels_test.go | 2 +- internal/controller/promotions/promotions.go | 2 + .../rollouts/api/v1alpha1/analysis_helpers.go | 52 + .../api/v1alpha1/analysis_helpers_test.go | 115 +++ .../rollouts/api/v1alpha1/analysis_types.go | 328 ++++++ .../controller/rollouts/api/v1alpha1/doc.go | 10 + .../api/v1alpha1/groupversion_info.go | 32 + .../controller/rollouts/api/v1alpha1/types.go | 6 + .../api/v1alpha1/zz_generated.deepcopy.go | 968 ++++++++++++++++++ internal/controller/stages/health.go | 2 +- internal/controller/stages/stages.go | 144 ++- internal/controller/stages/stages_test.go | 145 ++- internal/controller/stages/verification.go | 433 ++++++++ .../controller/stages/verification_test.go | 777 ++++++++++++++ internal/kargo/kargo.go | 5 +- internal/kargo/kargo_test.go | 5 +- internal/kubeclient/indexer.go | 42 +- internal/kubeclient/indexer_test.go | 101 +- pkg/api/v1alpha1/types.pb.go | 948 +++++++++++++---- .../stages.kargo.akuity.io_v1alpha1.json | 164 +++ ui/src/gen/v1alpha1/types_pb.ts | 276 +++++ 34 files changed, 5202 insertions(+), 294 deletions(-) create mode 100644 internal/controller/analysis/analysis_runs.go 
create mode 100644 internal/controller/analysis/analysis_runs_test.go create mode 100644 internal/controller/rollouts/api/v1alpha1/analysis_helpers.go create mode 100644 internal/controller/rollouts/api/v1alpha1/analysis_helpers_test.go create mode 100644 internal/controller/rollouts/api/v1alpha1/analysis_types.go create mode 100644 internal/controller/rollouts/api/v1alpha1/doc.go create mode 100644 internal/controller/rollouts/api/v1alpha1/groupversion_info.go create mode 100644 internal/controller/rollouts/api/v1alpha1/types.go create mode 100644 internal/controller/rollouts/api/v1alpha1/zz_generated.deepcopy.go create mode 100644 internal/controller/stages/verification.go create mode 100644 internal/controller/stages/verification_test.go diff --git a/Makefile b/Makefile index 2d3bcea54..438d9790f 100644 --- a/Makefile +++ b/Makefile @@ -1,6 +1,7 @@ SHELL ?= /bin/bash ARGO_CD_CHART_VERSION := 5.51.6 +ARGO_ROLLOUTS_CHART_VERSION := 2.32.8 BUF_LINT_ERROR_FORMAT ?= text GO_LINT_ERROR_FORMAT ?= colored-line-number CERT_MANAGER_CHART_VERSION := 1.11.5 @@ -225,7 +226,7 @@ hack-k3d-down: ctlptl delete -f hack/k3d/cluster.yaml .PHONY: hack-install-prereqs -hack-install-prereqs: hack-install-cert-manager hack-install-argocd +hack-install-prereqs: hack-install-cert-manager hack-install-argocd hack-install-argo-rollouts .PHONY: hack-install-cert-manager hack-install-cert-manager: @@ -250,10 +251,27 @@ hack-install-argocd: --set 'configs.params."application\.namespaces"=*' \ --set server.service.type=NodePort \ --set server.service.nodePortHttp=30080 \ + --set server.extensions.enabled=true \ + --set server.extensions.contents[0].name=argo-rollouts \ + --set server.extensions.contents[0].url=https://github.com/argoproj-labs/rollout-extension/releases/download/v0.3.3/extension.tar \ + --wait + +.PHONY: hack-install-argo-rollouts +hack-install-argo-rollouts: + helm upgrade rollouts argo-rollouts \ + --repo https://argoproj.github.io/argo-helm \ + --version 
$(ARGO_ROLLOUTS_CHART_VERSION) \ + --install \ + --create-namespace \ + --namespace rollouts \ --wait .PHONY: hack-uninstall-prereqs -hack-uninstall-prereqs: hack-uninstall-argocd hack-uninstall-cert-manager +hack-uninstall-prereqs: hack-uninstall-argo-rollouts hack-uninstall-argocd hack-uninstall-cert-manager + +.PHONY: hack-uninstall-argo-rollouts +hack-uninstall-argo-rollouts: + helm delete rollouts --namespace rollouts .PHONY: hack-uninstall-argocd hack-uninstall-argocd: @@ -262,22 +280,3 @@ hack-uninstall-argocd: .PHONY: hack-uninstall-cert-manager hack-uninstall-cert-manager: helm delete cert-manager --namespace cert-manager - -.PHONY: hack-add-rollouts -hack-add-rollouts: - helm upgrade argocd argo-cd \ - --repo https://argoproj.github.io/argo-helm \ - --version $(ARGO_CD_CHART_VERSION) \ - --namespace argocd \ - --reuse-values \ - --set server.extensions.enabled=true \ - --set server.extensions.contents[0].name=argo-rollouts \ - --set server.extensions.contents[0].url=https://github.com/argoproj-labs/rollout-extension/releases/download/v0.2.0/extension.tar \ - --wait - helm upgrade argo-rollouts argo-rollouts \ - --repo https://argoproj.github.io/argo-helm \ - --version 2.20.0 \ - --install \ - --create-namespace \ - --namespace argo-rollouts \ - --wait diff --git a/api/v1alpha1/labels.go b/api/v1alpha1/labels.go index 77921ddc2..58188b00e 100644 --- a/api/v1alpha1/labels.go +++ b/api/v1alpha1/labels.go @@ -1,9 +1,11 @@ package v1alpha1 const ( + AliasLabelKey = "kargo.akuity.io/alias" LabelProjectKey = "kargo.akuity.io/project" + FreightLabelKey = "kargo.akuity.io/freight" + ShardLabelKey = "kargo.akuity.io/shard" + StageLabelKey = "kargo.akuity.io/stage" LabelTrueValue = "true" - - AliasLabelKey = "kargo.akuity.io/alias" ) diff --git a/api/v1alpha1/stage_types.go b/api/v1alpha1/stage_types.go index 3ca4a4a0a..5577e088d 100644 --- a/api/v1alpha1/stage_types.go +++ b/api/v1alpha1/stage_types.go @@ -6,6 +6,20 @@ import ( metav1 
"k8s.io/apimachinery/pkg/apis/meta/v1" ) +type StagePhase string + +const ( + // StagePhaseNotApplicable denotes a Stage that has no Freight. + StagePhaseNotApplicable StagePhase = "NotApplicable" + // StagePhaseSteady denotes a Stage that has Freight and is not currently + // being promoted or verified. + StagePhaseSteady StagePhase = "Steady" + // StagePhasePromoting denotes a Stage that is currently being promoted. + StagePhasePromoting StagePhase = "Promoting" + // StagePhaseVerifying denotes a Stage that is currently being verified. + StagePhaseVerifying StagePhase = "Verifying" +) + // +kubebuilder:validation:Enum={ImageAndTag,Tag,ImageAndDigest,Digest} type ImageUpdateValueType string @@ -63,6 +77,7 @@ const ( //+kubebuilder:subresource:status //+kubebuilder:printcolumn:name=Current Freight,type=string,JSONPath=`.status.currentFreight.id` //+kubebuilder:printcolumn:name=Health,type=string,JSONPath=`.status.health.status` +//+kubebuilder:printcolumn:name=Phase,type=string,JSONPath=`.status.phase` //+kubebuilder:printcolumn:name=Age,type=date,JSONPath=`.metadata.creationTimestamp` // Stage is the Kargo API's main type. @@ -97,6 +112,9 @@ type StageSpec struct { // single upstream Stage where they may otherwise have subscribed to multiple // upstream Stages. PromotionMechanisms *PromotionMechanisms `json:"promotionMechanisms,omitempty"` + // Verification describes how to verify a Stage's current Freight is fit for + // promotion downstream. + Verification *Verification `json:"verification,omitempty"` } // Subscriptions describes a Stage's sources of Freight. @@ -416,6 +434,8 @@ type ArgoCDHelmImageUpdate struct { // StageStatus describes a Stages's current and recent Freight, health, and // more. type StageStatus struct { + // Phase describes where the Stage currently is in its lifecycle. 
+ Phase StagePhase `json:"phase,omitempty"` // CurrentFreight is a simplified representation of the Stage's current // Freight describing what is currently deployed to the Stage. CurrentFreight *SimpleFreight `json:"currentFreight,omitempty"` @@ -447,6 +467,9 @@ type SimpleFreight struct { Images []Image `json:"images,omitempty"` // Charts describes specific versions of specific Helm charts. Charts []Chart `json:"charts,omitempty"` + // VerificationInfo is information about any verification process that was + // associated with this Freight for this Stage. + VerificationInfo *VerificationInfo `json:"verificationResult,omitempty"` } type SimpleFreightStack []SimpleFreight @@ -586,3 +609,63 @@ type PromotionInfo struct { // Freight is the freight being promoted Freight SimpleFreight `json:"freight"` } + +// Verification describes how to verify that a Promotion has been successful +// using Argo Rollouts AnalysisTemplates. +type Verification struct { + // AnalysisTemplates is a list of AnalysisTemplates from which AnalysisRuns + // should be created to verify a Stage's current Freight is fit to be promoted + // downstream. + AnalysisTemplates []AnalysisTemplateReference `json:"analysisTemplates,omitempty"` + // AnalysisRunMetadata is contains optional metadata that should be applied to + // all AnalysisRuns. + AnalysisRunMetadata *AnalysisRunMetadata `json:"analysisRunMetadata,omitempty"` + // Args lists arguments that should be added to all AnalysisRuns. + Args []AnalysisRunArgument `json:"args,omitempty"` +} + +// AnalysisTemplateReference is a reference to an AnalysisTemplate. +type AnalysisTemplateReference struct { + // Name is the name of the AnalysisTemplate in the same project/namespace as + // the Stage. + // + //+kubebuilder:validation:Required + Name string `json:"name"` +} + +// AnalysisRunMetadata contains optional metadata that should be applied to all +// AnalysisRuns. +type AnalysisRunMetadata struct { + // Additional labels to apply to an AnalysisRun. 
+ Labels map[string]string `json:"labels,omitempty"` + // Additional annotations to apply to an AnalysisRun. + Annotations map[string]string `json:"annotations,omitempty"` +} + +// AnalysisRunArgument represents an argument to be added to an AnalysisRun. +type AnalysisRunArgument struct { + // Name is the name of the argument. + // + //+kubebuilder:validation:Required + Name string `json:"name"` + // Value is the value of the argument. + // + //+kubebuilder:validation:Required + Value string `json:"value,omitempty"` +} + +// VerificationInfo contains information about the currently running +// Verification process. +type VerificationInfo struct { + AnalysisRun AnalysisRunReference `json:"analysisRun"` +} + +// AnalysisRunReference is a reference to an AnalysisRun. +type AnalysisRunReference struct { + // Namespace is the namespace of the AnalysisRun. + Namespace string `json:"namespace"` + // Name is the name of the AnalysisRun. + Name string `json:"name"` + // Phase is the last observed phase of the AnalysisRun referenced by Name. 
+ Phase string `json:"phase"` +} diff --git a/api/v1alpha1/types.proto b/api/v1alpha1/types.proto index e5895d385..d61a0d049 100644 --- a/api/v1alpha1/types.proto +++ b/api/v1alpha1/types.proto @@ -213,6 +213,7 @@ message StageList { message StageSpec { Subscriptions subscriptions = 1 [json_name = "subscriptions"]; PromotionMechanisms promotion_mechanisms = 2 [json_name = "promotionMechanisms"]; + optional Verification verification = 3 [json_name = "verification"]; } message Freight { @@ -243,6 +244,7 @@ message SimpleFreight { repeated GitCommit commits = 4 [json_name = "commits"]; repeated Image images = 5 [json_name = "images"]; repeated Chart charts = 6 [json_name = "charts"]; + optional VerificationInfo verification_info = 7 [json_name = "verificationInfo"]; } message StageStatus { @@ -251,6 +253,7 @@ message StageStatus { string error = 4 [json_name = "error"]; optional Health health = 5 [json_name = "health"]; optional PromotionInfo current_promotion = 6 [json_name = "currentPromotion"]; + string phase = 7 [json_name = "phase"]; } message StageSubscription { @@ -278,3 +281,33 @@ message WarehouseStatus { string error = 1 [json_name = "error"]; int64 observed_generation = 2 [json_name = "observedGeneration"]; } + +message Verification { + repeated AnalysisTemplateReference analysis_templates = 1 [json_name = "analysisTemplates"]; + optional AnalysisRunMetadata analysis_run_metadata = 2 [json_name = "analysisRunMetadata"]; + repeated AnalysisRunArgument args = 3 [json_name = "args"]; +} + +message AnalysisTemplateReference { + string name = 1 [json_name = "name"]; +} + +message AnalysisRunMetadata { + map labels = 1 [json_name = "labels"]; + map annotations = 2 [json_name = "annotations"]; +} + +message AnalysisRunArgument { + string name = 1 [json_name = "name"]; + string value = 2 [json_name = "value"]; +} + +message VerificationInfo { + AnalysisRunReference analysis_run = 1 [json_name = "analysisRun"]; +} + +message AnalysisRunReference { + string namespace 
= 1 [json_name = "namespace"]; + string name = 2 [json_name = "name"]; + string phase = 3 [json_name = "phase"]; +} diff --git a/api/v1alpha1/zz_generated.deepcopy.go b/api/v1alpha1/zz_generated.deepcopy.go index 9e2d745d5..ed2bd99c5 100644 --- a/api/v1alpha1/zz_generated.deepcopy.go +++ b/api/v1alpha1/zz_generated.deepcopy.go @@ -8,6 +8,80 @@ import ( "k8s.io/apimachinery/pkg/runtime" ) +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *AnalysisRunArgument) DeepCopyInto(out *AnalysisRunArgument) { + *out = *in +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AnalysisRunArgument. +func (in *AnalysisRunArgument) DeepCopy() *AnalysisRunArgument { + if in == nil { + return nil + } + out := new(AnalysisRunArgument) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *AnalysisRunMetadata) DeepCopyInto(out *AnalysisRunMetadata) { + *out = *in + if in.Labels != nil { + in, out := &in.Labels, &out.Labels + *out = make(map[string]string, len(*in)) + for key, val := range *in { + (*out)[key] = val + } + } + if in.Annotations != nil { + in, out := &in.Annotations, &out.Annotations + *out = make(map[string]string, len(*in)) + for key, val := range *in { + (*out)[key] = val + } + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AnalysisRunMetadata. +func (in *AnalysisRunMetadata) DeepCopy() *AnalysisRunMetadata { + if in == nil { + return nil + } + out := new(AnalysisRunMetadata) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *AnalysisRunReference) DeepCopyInto(out *AnalysisRunReference) { + *out = *in +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AnalysisRunReference. +func (in *AnalysisRunReference) DeepCopy() *AnalysisRunReference { + if in == nil { + return nil + } + out := new(AnalysisRunReference) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *AnalysisTemplateReference) DeepCopyInto(out *AnalysisTemplateReference) { + *out = *in +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AnalysisTemplateReference. +func (in *AnalysisTemplateReference) DeepCopy() *AnalysisTemplateReference { + if in == nil { + return nil + } + out := new(AnalysisTemplateReference) + in.DeepCopyInto(out) + return out +} + // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *ApprovedStage) DeepCopyInto(out *ApprovedStage) { *out = *in @@ -814,6 +888,11 @@ func (in *SimpleFreight) DeepCopyInto(out *SimpleFreight) { *out = make([]Chart, len(*in)) copy(*out, *in) } + if in.VerificationInfo != nil { + in, out := &in.VerificationInfo, &out.VerificationInfo + *out = new(VerificationInfo) + **out = **in + } } // DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SimpleFreight. @@ -923,6 +1002,11 @@ func (in *StageSpec) DeepCopyInto(out *StageSpec) { *out = new(PromotionMechanisms) (*in).DeepCopyInto(*out) } + if in.Verification != nil { + in, out := &in.Verification, &out.Verification + *out = new(Verification) + (*in).DeepCopyInto(*out) + } } // DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new StageSpec. 
@@ -1007,6 +1091,52 @@ func (in *Subscriptions) DeepCopy() *Subscriptions { return out } +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *Verification) DeepCopyInto(out *Verification) { + *out = *in + if in.AnalysisTemplates != nil { + in, out := &in.AnalysisTemplates, &out.AnalysisTemplates + *out = make([]AnalysisTemplateReference, len(*in)) + copy(*out, *in) + } + if in.AnalysisRunMetadata != nil { + in, out := &in.AnalysisRunMetadata, &out.AnalysisRunMetadata + *out = new(AnalysisRunMetadata) + (*in).DeepCopyInto(*out) + } + if in.Args != nil { + in, out := &in.Args, &out.Args + *out = make([]AnalysisRunArgument, len(*in)) + copy(*out, *in) + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Verification. +func (in *Verification) DeepCopy() *Verification { + if in == nil { + return nil + } + out := new(Verification) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *VerificationInfo) DeepCopyInto(out *VerificationInfo) { + *out = *in + out.AnalysisRun = in.AnalysisRun +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new VerificationInfo. +func (in *VerificationInfo) DeepCopy() *VerificationInfo { + if in == nil { + return nil + } + out := new(VerificationInfo) + in.DeepCopyInto(out) + return out +} + // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
func (in *VerifiedStage) DeepCopyInto(out *VerifiedStage) { *out = *in diff --git a/charts/kargo/crds/kargo.akuity.io_stages.yaml b/charts/kargo/crds/kargo.akuity.io_stages.yaml index 6c30cf19c..121b7bdb9 100644 --- a/charts/kargo/crds/kargo.akuity.io_stages.yaml +++ b/charts/kargo/crds/kargo.akuity.io_stages.yaml @@ -21,6 +21,9 @@ spec: - jsonPath: .status.health.status name: Health type: string + - jsonPath: .status.phase + name: Phase + type: string - jsonPath: .metadata.creationTimestamp name: Age type: date @@ -433,6 +436,59 @@ spec: field is mutually exclusive with the UpstreamStages field. type: string type: object + verification: + description: Verification describes how to verify a Stage's current + Freight is fit for promotion downstream. + properties: + analysisRunMetadata: + description: AnalysisRunMetadata is contains optional metadata + that should be applied to all AnalysisRuns. + properties: + annotations: + additionalProperties: + type: string + description: Additional annotations to apply to an AnalysisRun. + type: object + labels: + additionalProperties: + type: string + description: Additional labels to apply to an AnalysisRun. + type: object + type: object + analysisTemplates: + description: AnalysisTemplates is a list of AnalysisTemplates + from which AnalysisRuns should be created to verify a Stage's + current Freight is fit to be promoted downstream. + items: + description: AnalysisTemplateReference is a reference to an + AnalysisTemplate. + properties: + name: + description: Name is the name of the AnalysisTemplate in + the same project/namespace as the Stage. + type: string + required: + - name + type: object + type: array + args: + description: Args lists arguments that should be added to all + AnalysisRuns. + items: + description: AnalysisRunArgument represents an argument to be + added to an AnalysisRun. + properties: + name: + description: Name is the name of the argument. 
+ type: string + value: + description: Value is the value of the argument. + type: string + required: + - name + type: object + type: array + type: object required: - subscriptions type: object @@ -532,6 +588,31 @@ spec: type: string type: object type: array + verificationResult: + description: VerificationInfo is information about any verification + process that was associated with this Freight for this Stage. + properties: + analysisRun: + description: AnalysisRunReference is a reference to an AnalysisRun. + properties: + name: + description: Name is the name of the AnalysisRun. + type: string + namespace: + description: Namespace is the namespace of the AnalysisRun. + type: string + phase: + description: Phase is the last observed phase of the AnalysisRun + referenced by Name. + type: string + required: + - name + - namespace + - phase + type: object + required: + - analysisRun + type: object type: object currentPromotion: description: CurrentPromotion is a reference to the currently Running @@ -629,6 +710,32 @@ spec: type: string type: object type: array + verificationResult: + description: VerificationInfo is information about any verification + process that was associated with this Freight for this Stage. + properties: + analysisRun: + description: AnalysisRunReference is a reference to an + AnalysisRun. + properties: + name: + description: Name is the name of the AnalysisRun. + type: string + namespace: + description: Namespace is the namespace of the AnalysisRun. + type: string + phase: + description: Phase is the last observed phase of the + AnalysisRun referenced by Name. 
+ type: string + required: + - name + - namespace + - phase + type: object + required: + - analysisRun + type: object type: object name: description: Name is the name of the Promotion @@ -794,6 +901,31 @@ spec: type: string type: object type: array + verificationResult: + description: VerificationInfo is information about any verification + process that was associated with this Freight for this Stage. + properties: + analysisRun: + description: AnalysisRunReference is a reference to an AnalysisRun. + properties: + name: + description: Name is the name of the AnalysisRun. + type: string + namespace: + description: Namespace is the namespace of the AnalysisRun. + type: string + phase: + description: Phase is the last observed phase of the + AnalysisRun referenced by Name. + type: string + required: + - name + - namespace + - phase + type: object + required: + - analysisRun + type: object type: object type: array observedGeneration: @@ -801,6 +933,9 @@ spec: that this Stage status was reconciled against. format: int64 type: integer + phase: + description: Phase describes where the Stage currently is in its lifecycle. 
+ type: string type: object required: - spec diff --git a/charts/kargo/templates/controller/cluster-roles.yaml b/charts/kargo/templates/controller/cluster-roles.yaml index e9b4cc67b..036c16446 100644 --- a/charts/kargo/templates/controller/cluster-roles.yaml +++ b/charts/kargo/templates/controller/cluster-roles.yaml @@ -63,6 +63,14 @@ rules: verbs: - update - patch +- apiGroups: + - argoproj.io + resources: + - analysistemplates + verbs: + - get + - list + - watch --- {{- if not .Values.controller.argocd.watchArgocdNamespaceOnly }} apiVersion: rbac.authorization.k8s.io/v1 @@ -82,5 +90,14 @@ rules: - list - patch - watch +- apiGroups: + - argoproj.io + resources: + - analysisruns + verbs: + - create + - get + - list + - watch {{- end }} {{- end }} diff --git a/cmd/controlplane/controller.go b/cmd/controlplane/controller.go index 24b51503e..a00ff55dd 100644 --- a/cmd/controlplane/controller.go +++ b/cmd/controlplane/controller.go @@ -13,9 +13,11 @@ import ( kargoapi "github.com/akuity/kargo/api/v1alpha1" "github.com/akuity/kargo/internal/api/kubernetes" + "github.com/akuity/kargo/internal/controller/analysis" "github.com/akuity/kargo/internal/controller/applications" argocd "github.com/akuity/kargo/internal/controller/argocd/api/v1alpha1" "github.com/akuity/kargo/internal/controller/promotions" + rollouts "github.com/akuity/kargo/internal/controller/rollouts/api/v1alpha1" "github.com/akuity/kargo/internal/controller/stages" "github.com/akuity/kargo/internal/controller/warehouses" "github.com/akuity/kargo/internal/credentials" @@ -65,6 +67,12 @@ func newControllerCommand() *cobra.Command { "scheme", ) } + if err = rollouts.AddToScheme(scheme); err != nil { + return errors.Wrap( + err, + "error adding Argo Rollouts API to Kargo controller manager scheme", + ) + } if err = kargoapi.AddToScheme(scheme); err != nil { return errors.Wrap( err, @@ -82,15 +90,14 @@ func newControllerCommand() *cobra.Command { } } - var appMgr manager.Manager + var argocdMgr manager.Manager { 
restCfg, err := kubernetes.GetRestConfig(ctx, os.GetEnv("ARGOCD_KUBECONFIG", "")) if err != nil { return errors.Wrap( err, - "error loading REST config for Argo CD Application controller "+ - "manager", + "error loading REST config for Argo CD controller manager", ) } restCfg.ContentType = runtime.ContentTypeJSON @@ -99,15 +106,14 @@ func newControllerCommand() *cobra.Command { if err = corev1.AddToScheme(scheme); err != nil { return errors.Wrap( err, - "error adding Kubernetes core API to Argo CD Application "+ - "controller manager scheme", + "error adding Kubernetes core API to Argo CD controller "+ + "manager scheme", ) } if err = argocd.AddToScheme(scheme); err != nil { return errors.Wrap( err, - "error adding Kargo API to Argo CD Application controller manager "+ - "scheme", + "error adding Argo CD API to Argo CD controller manager scheme", ) } @@ -117,7 +123,7 @@ func newControllerCommand() *cobra.Command { ) { watchNamespace = os.GetEnv("ARGOCD_NAMESPACE", "argocd") } - if appMgr, err = ctrl.NewManager( + if argocdMgr, err = ctrl.NewManager( restCfg, ctrl.Options{ Scheme: scheme, @@ -132,43 +138,105 @@ func newControllerCommand() *cobra.Command { } } + var rolloutsMgr manager.Manager + { + restCfg, err := + kubernetes.GetRestConfig(ctx, os.GetEnv("ARGOCD_KUBECONFIG", "")) + if err != nil { + return errors.Wrap( + err, + "error loading REST config for Argo Rollouts controller manager", + ) + } + restCfg.ContentType = runtime.ContentTypeJSON + + scheme := runtime.NewScheme() + if err = rollouts.AddToScheme(scheme); err != nil { + return errors.Wrap( + err, + "error adding Argo Rollouts API to Argo Rollouts controller "+ + "manager scheme", + ) + } + + var watchNamespace string // Empty string means all namespaces + if shardName != "" { + // TODO: When NOT sharded, Kargo can simply create AnalysisRun + // resources in the project namespaces. 
When sharded, AnalysisRun + // resources must be created IN the shard clusters (not the Kargo + // control plane cluster) and project namespaces do not exist in the + // shard clusters. We need a place to put them, so for now we allow + // the user to specify a namespace that that exists on each shard for + // this purpose. Note that the namespace does not need to be the same + // on every shard. This may be one of the weaker points in our tenancy + // model and can stand to be improved. + watchNamespace = os.GetEnv( + "ARGO_ROLLOUTS_ANALYSIS_RUNS_NAMESPACE", + "kargo-analysis-runs", + ) + } + if rolloutsMgr, err = ctrl.NewManager( + restCfg, + ctrl.Options{ + Scheme: scheme, + MetricsBindAddress: "0", + Namespace: watchNamespace, + }, + ); err != nil { + return errors.Wrap( + err, + "error initializing Argo Rollouts AnalysisRun controller manager", + ) + } + } + credentialsDbOpts := make([]credentials.KubernetesDatabaseOption, 0, 1) if types.MustParseBool( os.GetEnv("ARGOCD_ENABLE_CREDENTIAL_BORROWING", "false"), ) { - credentialsDbOpts = append(credentialsDbOpts, credentials.WithArgoClient(appMgr.GetClient())) + credentialsDbOpts = append(credentialsDbOpts, credentials.WithArgoClient(argocdMgr.GetClient())) } credentialsDB := credentials.NewKubernetesDatabase( kargoMgr.GetClient(), credentialsDbOpts..., ) - if err := stages.SetupReconcilerWithManager( + if err := analysis.SetupReconcilerWithManager( ctx, kargoMgr, - appMgr, + rolloutsMgr, shardName, ); err != nil { - return errors.Wrap(err, "error setting up Stages reconciler") + return errors.Wrap(err, "error setting up AnalysisRuns reconciler") + } + + if err := applications.SetupReconcilerWithManager( + ctx, + kargoMgr, + argocdMgr, + shardName, + ); err != nil { + return errors.Wrap(err, "error setting up Applications reconciler") } if err := promotions.SetupReconcilerWithManager( ctx, kargoMgr, - appMgr, + argocdMgr, credentialsDB, shardName, ); err != nil { return errors.Wrap(err, "error setting up 
Promotions reconciler") } - if err := applications.SetupReconcilerWithManager( + if err := stages.SetupReconcilerWithManager( ctx, kargoMgr, - appMgr, + argocdMgr, + rolloutsMgr, shardName, ); err != nil { - return errors.Wrap(err, "error setting up Applications reconciler") + return errors.Wrap(err, "error setting up Stages reconciler") } if err := warehouses.SetupReconcilerWithManager( @@ -183,6 +251,14 @@ func newControllerCommand() *cobra.Command { wg := sync.WaitGroup{} + wg.Add(1) + go func() { + defer wg.Done() + if err := argocdMgr.Start(ctx); err != nil { + errChan <- errors.Wrap(err, "error starting argo cd manager") + } + }() + wg.Add(1) go func() { defer wg.Done() @@ -194,8 +270,8 @@ func newControllerCommand() *cobra.Command { wg.Add(1) go func() { defer wg.Done() - if err := appMgr.Start(ctx); err != nil { - errChan <- errors.Wrap(err, "error starting argo manager") + if err := rolloutsMgr.Start(ctx); err != nil { + errChan <- errors.Wrap(err, "error starting rollouts manager") } }() diff --git a/internal/api/types/v1alpha1/types.go b/internal/api/types/v1alpha1/types.go index 7a4b093d5..1db54004b 100644 --- a/internal/api/types/v1alpha1/types.go +++ b/internal/api/types/v1alpha1/types.go @@ -54,6 +54,7 @@ func FromStageSpecProto(s *v1alpha1.StageSpec) *kargoapi.StageSpec { return &kargoapi.StageSpec{ Subscriptions: FromSubscriptionsProto(s.GetSubscriptions()), PromotionMechanisms: FromPromotionMechanismsProto(s.GetPromotionMechanisms()), + Verification: FromVerificationProto(s.GetVerification()), } } @@ -66,6 +67,7 @@ func FromStageStatusProto(s *v1alpha1.StageStatus) *kargoapi.StageStatus { history[idx] = *FromSimpleFreightProto(freight) } return &kargoapi.StageStatus{ + Phase: kargoapi.StagePhase(s.GetPhase()), CurrentFreight: FromSimpleFreightProto(s.GetCurrentFreight()), History: history, Health: FromHealthProto(s.GetHealth()), @@ -137,10 +139,11 @@ func FromSimpleFreightProto(s *v1alpha1.SimpleFreight) *kargoapi.SimpleFreight { charts[idx] = 
*FromChartProto(chart) } return &kargoapi.SimpleFreight{ - ID: s.GetId(), - Commits: commits, - Images: images, - Charts: charts, + ID: s.GetId(), + Commits: commits, + Images: images, + Charts: charts, + VerificationInfo: FromVerificationInfo(s.VerificationInfo), } } @@ -587,6 +590,77 @@ func FromPromotionPolicyProto(p *v1alpha1.PromotionPolicy) *kargoapi.PromotionPo } } +func FromVerificationProto(v *v1alpha1.Verification) *kargoapi.Verification { + if v == nil { + return nil + } + templates := + make([]kargoapi.AnalysisTemplateReference, len(v.AnalysisTemplates)) + for i := range v.AnalysisTemplates { + templates[i] = FromAnalysisTemplateReferenceProto(v.AnalysisTemplates[i]) + } + args := make([]kargoapi.AnalysisRunArgument, len(v.Args)) + for i := range v.Args { + args[i] = FromAnalysisRunArgumentProto(v.Args[i]) + } + return &kargoapi.Verification{ + AnalysisTemplates: templates, + AnalysisRunMetadata: FromAnalysisRunMetadataProto(v.AnalysisRunMetadata), + Args: args, + } +} + +func FromAnalysisTemplateReferenceProto( + a *v1alpha1.AnalysisTemplateReference, +) kargoapi.AnalysisTemplateReference { + return kargoapi.AnalysisTemplateReference{ + Name: a.Name, + } +} + +func FromAnalysisRunMetadataProto( + a *v1alpha1.AnalysisRunMetadata, +) *kargoapi.AnalysisRunMetadata { + if a == nil { + return nil + } + return &kargoapi.AnalysisRunMetadata{ + Labels: a.Labels, + Annotations: a.Annotations, + } +} + +func FromAnalysisRunArgumentProto( + a *v1alpha1.AnalysisRunArgument, +) kargoapi.AnalysisRunArgument { + return kargoapi.AnalysisRunArgument{ + Name: a.Name, + Value: a.Value, + } +} + +func FromVerificationInfo(v *v1alpha1.VerificationInfo) *kargoapi.VerificationInfo { + if v == nil { + return nil + } + k := &kargoapi.VerificationInfo{} + if v.AnalysisRun != nil { + k.AnalysisRun = *FromAnalysisRunReferenceProto(v.AnalysisRun) + } + return k +} + +func FromAnalysisRunReferenceProto(a *v1alpha1.AnalysisRunReference) *kargoapi.AnalysisRunReference { + if a == nil 
{ + return nil + } + return &kargoapi.AnalysisRunReference{ + Namespace: a.Namespace, + Name: a.Name, + Phase: a.Phase, + } +} + func ToStageProto(e kargoapi.Stage) *v1alpha1.Stage { // Status var currentFreight *v1alpha1.SimpleFreight @@ -629,8 +703,10 @@ func ToStageProto(e kargoapi.Stage) *v1alpha1.Stage { Spec: &v1alpha1.StageSpec{ Subscriptions: ToSubscriptionsProto(*e.Spec.Subscriptions), PromotionMechanisms: promotionMechanisms, + Verification: ToVerificationProto(e.Spec.Verification), }, Status: &v1alpha1.StageStatus{ + Phase: string(e.Status.Phase), CurrentFreight: currentFreight, CurrentPromotion: currentPromotion, History: history, @@ -922,11 +998,12 @@ func ToSimpleFreightProto(s kargoapi.SimpleFreight, firstSeen *time.Time) *v1alp charts[idx] = ToChartProto(s.Charts[idx]) } return &v1alpha1.SimpleFreight{ - Id: s.ID, - FirstSeen: firstSeenProto, - Commits: commits, - Images: images, - Charts: charts, + Id: s.ID, + FirstSeen: firstSeenProto, + Commits: commits, + Images: images, + Charts: charts, + VerificationInfo: ToVerificationInfoProto(s.VerificationInfo), } } @@ -1065,3 +1142,72 @@ func ToVersionProto(v version.Version) *svcv1alpha1.VersionInfo { Platform: v.Platform, } } + +func ToVerificationProto(v *kargoapi.Verification) *v1alpha1.Verification { + if v == nil { + return nil + } + templates := + make([]*v1alpha1.AnalysisTemplateReference, len(v.AnalysisTemplates)) + for i := range v.AnalysisTemplates { + templates[i] = ToAnalysisTemplateReferenceProto(v.AnalysisTemplates[i]) + } + args := make([]*v1alpha1.AnalysisRunArgument, len(v.Args)) + for i := range v.Args { + args[i] = ToAnalysisRunArgumentProto(v.Args[i]) + } + return &v1alpha1.Verification{ + AnalysisTemplates: templates, + AnalysisRunMetadata: ToAnalysisRunMetadataProto(v.AnalysisRunMetadata), + Args: args, + } +} + +func ToAnalysisTemplateReferenceProto( + a kargoapi.AnalysisTemplateReference, +) *v1alpha1.AnalysisTemplateReference { + return &v1alpha1.AnalysisTemplateReference{ + 
Name: a.Name, + } +} + +func ToAnalysisRunMetadataProto( + a *kargoapi.AnalysisRunMetadata, +) *v1alpha1.AnalysisRunMetadata { + if a == nil { + return nil + } + return &v1alpha1.AnalysisRunMetadata{ + Labels: a.Labels, + Annotations: a.Annotations, + } +} + +func ToAnalysisRunArgumentProto( + a kargoapi.AnalysisRunArgument, +) *v1alpha1.AnalysisRunArgument { + return &v1alpha1.AnalysisRunArgument{ + Name: a.Name, + Value: a.Value, + } +} + +func ToVerificationInfoProto(v *kargoapi.VerificationInfo) *v1alpha1.VerificationInfo { + if v == nil { + return nil + } + return &v1alpha1.VerificationInfo{ + AnalysisRun: ToAnalysisRunReferenceProto(&v.AnalysisRun), + } +} + +func ToAnalysisRunReferenceProto(a *kargoapi.AnalysisRunReference) *v1alpha1.AnalysisRunReference { + if a == nil { + return nil + } + return &v1alpha1.AnalysisRunReference{ + Namespace: a.Namespace, + Name: a.Name, + Phase: a.Phase, + } +} diff --git a/internal/cli/cmd/get/stages.go b/internal/cli/cmd/get/stages.go index 5ff705d74..46a426696 100644 --- a/internal/cli/cmd/get/stages.go +++ b/internal/cli/cmd/get/stages.go @@ -100,6 +100,7 @@ func newStageTable(list *metav1.List) *metav1.Table { stage.Name, currentFreightID, health, + stage.Status.Phase, duration.HumanDuration(time.Since(stage.CreationTimestamp.Time)), }, Object: list.Items[i], @@ -110,6 +111,7 @@ func newStageTable(list *metav1.List) *metav1.Table { {Name: "Name", Type: "string"}, {Name: "Current Freight", Type: "string"}, {Name: "Health", Type: "string"}, + {Name: "Phase", Type: "string"}, {Name: "Age", Type: "string"}, }, Rows: rows, diff --git a/internal/controller/analysis/analysis_runs.go b/internal/controller/analysis/analysis_runs.go new file mode 100644 index 000000000..56c1fa81b --- /dev/null +++ b/internal/controller/analysis/analysis_runs.go @@ -0,0 +1,149 @@ +package analysis + +import ( + "context" + "fmt" + + "github.com/pkg/errors" + log "github.com/sirupsen/logrus" + "k8s.io/apimachinery/pkg/apis/meta/v1/unstructured" + 
"k8s.io/apimachinery/pkg/fields" + "k8s.io/apimachinery/pkg/runtime" + ctrl "sigs.k8s.io/controller-runtime" + "sigs.k8s.io/controller-runtime/pkg/client" + "sigs.k8s.io/controller-runtime/pkg/event" + "sigs.k8s.io/controller-runtime/pkg/manager" + "sigs.k8s.io/controller-runtime/pkg/predicate" + + kargoapi "github.com/akuity/kargo/api/v1alpha1" + "github.com/akuity/kargo/internal/controller" + rollouts "github.com/akuity/kargo/internal/controller/rollouts/api/v1alpha1" + "github.com/akuity/kargo/internal/kubeclient" + "github.com/akuity/kargo/internal/logging" +) + +// reconciler reconciles Argo Rollouts AnalysisRun resources. +type reconciler struct { + kubeClient client.Client +} + +// SetupReconcilerWithManager initializes a reconciler for Argo Rollouts +// AnalysisRun resources and registers it with the provided Manager. +func SetupReconcilerWithManager( + ctx context.Context, + kargoMgr manager.Manager, + analysisMgr manager.Manager, + shardName string, +) error { + // Index Stages by AnalysisRun + if err := kubeclient.IndexStagesByAnalysisRun(ctx, kargoMgr, shardName); err != nil { + return errors.Wrap(err, "index Stages by Argo Rollouts AnalysisRun") + } + logger := logging.LoggerFromContext(ctx) + return ctrl.NewControllerManagedBy(analysisMgr). + For(&rollouts.AnalysisRun{}). + WithEventFilter(analysisRunPhaseChangePredicate{logger: logger}). + WithOptions(controller.CommonOptions()). + Complete(newReconciler(kargoMgr.GetClient())) +} + +func newReconciler(kubeClient client.Client) *reconciler { + return &reconciler{ + kubeClient: kubeClient, + } +} + +// Reconcile is part of the main Kubernetes reconciliation loop which aims to +// move the current state of the cluster closer to the desired state. 
+func (r *reconciler) Reconcile( + ctx context.Context, + req ctrl.Request, +) (ctrl.Result, error) { + result := ctrl.Result{} + + logger := logging.LoggerFromContext(ctx).WithFields(log.Fields{ + "analysisRunNamespace": req.NamespacedName.Namespace, + "analysisRun": req.NamespacedName.Name, + }) + logger.Debug("reconciling Argo Rollouts AnalysisRun") + + // Find the Stage associated with this AnalysisRun + stages := &kargoapi.StageList{} + if err := r.kubeClient.List( + ctx, + stages, + &client.ListOptions{ + FieldSelector: fields.OneTermEqualSelector( + kubeclient.StagesByAnalysisRunIndexField, + fmt.Sprintf( + "%s:%s", + req.NamespacedName.Namespace, + req.NamespacedName.Name, + ), + ), + }, + ); err != nil { + return result, errors.Wrapf( + err, + "error listing Stages for AnalysisRun %q in namespace %q", + req.NamespacedName.Name, + req.NamespacedName.Namespace, + ) + } + + // Force associated Stages to reconcile by patching an annotation + errs := make([]error, 0, len(stages.Items)) + for _, e := range stages.Items { + stage := e // This is to sidestep implicit memory aliasing in this for loop + objKey := client.ObjectKey{ + Namespace: stage.Namespace, + Name: stage.Name, + } + _, err := kargoapi.RefreshStage(ctx, r.kubeClient, objKey) + if err != nil { + errs = append(errs, err) + continue + } + logger.WithFields(log.Fields{ + "stageNamespace": stage.Namespace, + "stage": stage.Name, + }).Debug("successfully patched Stage to force reconciliation") + } + if len(errs) > 0 { + return result, errs[0] + } + + return result, nil +} + +type analysisRunPhaseChangePredicate struct { + predicate.Funcs + + logger *log.Entry +} + +// Update implements default UpdateEvent filter for checking if AnalysisRun +// changed phase. 
+func (a analysisRunPhaseChangePredicate) Update(e event.UpdateEvent) bool { + if e.ObjectOld == nil { + a.logger.Errorf("Update event has no old object to update: %v", e) + return false + } + if e.ObjectNew == nil { + a.logger.Errorf("Update event has no new object for update: %v", e) + return false + } + newUn, err := runtime.DefaultUnstructuredConverter.ToUnstructured(e.ObjectNew) + if err != nil { + a.logger.Errorf("Failed to convert new AnalysisRun: %v", e.ObjectNew) + return false + } + oldUn, err := runtime.DefaultUnstructuredConverter.ToUnstructured(e.ObjectOld) + if err != nil { + a.logger.Errorf("Failed to convert old AnalysisRun: %v", e.ObjectOld) + return false + } + oldPhase, _, _ := unstructured.NestedString(oldUn, "status", "phase") + newPhase, _, _ := unstructured.NestedString(newUn, "status", "phase") + return newPhase != oldPhase +} diff --git a/internal/controller/analysis/analysis_runs_test.go b/internal/controller/analysis/analysis_runs_test.go new file mode 100644 index 000000000..788e888f2 --- /dev/null +++ b/internal/controller/analysis/analysis_runs_test.go @@ -0,0 +1,59 @@ +package analysis + +import ( + "testing" + + "github.com/stretchr/testify/require" + "k8s.io/apimachinery/pkg/apis/meta/v1/unstructured" + "sigs.k8s.io/controller-runtime/pkg/event" +) + +func TestAnalysisRunPhaseChangePredicate(t *testing.T) { + testCases := []struct { + name string + old map[string]any + new map[string]any + updated bool + }{ + { + name: "phase changed", + old: map[string]any{ + "status": map[string]any{ + "phase": "old-phase", + }, + }, + new: map[string]any{ + "status": map[string]any{ + "phase": "new-phase", + }, + }, + updated: true, + }, + { + name: "phase did not change", + old: map[string]any{ + "status": map[string]any{ + "phase": "old-phase", + }, + }, + new: map[string]any{ + "status": map[string]any{ + "phase": "old-phase", + }, + }, + updated: false, + }, + } + for _, testCase := range testCases { + t.Run(testCase.name, func(t *testing.T) 
{ + p := analysisRunPhaseChangePredicate{} + newUn := &unstructured.Unstructured{Object: testCase.new} + oldUn := &unstructured.Unstructured{Object: testCase.old} + updated := p.Update(event.UpdateEvent{ + ObjectNew: newUn, + ObjectOld: oldUn, + }) + require.Equal(t, testCase.updated, updated) + }) + } +} diff --git a/internal/controller/labels.go b/internal/controller/labels.go index 7893dd2a0..ca34d3b35 100644 --- a/internal/controller/labels.go +++ b/internal/controller/labels.go @@ -5,9 +5,9 @@ import ( metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/labels" "sigs.k8s.io/controller-runtime/pkg/predicate" -) -const ShardLabelKey = "kargo.akuity.io/shard" + kargoapi "github.com/akuity/kargo/api/v1alpha1" +) // GetShardPredicate constructs a predicate used as an event filter for various // reconcilers. If a non-empty shard name is passed to this function, it returns @@ -20,7 +20,7 @@ func GetShardPredicate(shard string) (predicate.Predicate, error) { metav1.LabelSelector{ MatchExpressions: []metav1.LabelSelectorRequirement{ { - Key: ShardLabelKey, + Key: kargoapi.ShardLabelKey, Operator: metav1.LabelSelectorOpDoesNotExist, }, }, @@ -32,7 +32,7 @@ func GetShardPredicate(shard string) (predicate.Predicate, error) { *metav1.SetAsLabelSelector( labels.Set( map[string]string{ - ShardLabelKey: shard, + kargoapi.ShardLabelKey: shard, }, ), ), diff --git a/internal/controller/labels_test.go b/internal/controller/labels_test.go index fb7cae2f7..d0f23d129 100644 --- a/internal/controller/labels_test.go +++ b/internal/controller/labels_test.go @@ -20,7 +20,7 @@ func TestGetShardPredicate(t *testing.T) { Object: &kargoapi.Stage{ ObjectMeta: metav1.ObjectMeta{ Labels: map[string]string{ - ShardLabelKey: testShardName, + kargoapi.ShardLabelKey: testShardName, }, }, }, diff --git a/internal/controller/promotions/promotions.go b/internal/controller/promotions/promotions.go index d5e4d5154..a22d13d94 100644 --- a/internal/controller/promotions/promotions.go 
+++ b/internal/controller/promotions/promotions.go @@ -322,6 +322,7 @@ func (r *reconciler) promote( } err = kubeclient.PatchStatus(ctx, r.kargoClient, stage, func(status *kargoapi.StageStatus) { + status.Phase = kargoapi.StagePhasePromoting status.CurrentPromotion = &kargoapi.PromotionInfo{ Name: promo.Name, Freight: simpleTargetFreight, @@ -339,6 +340,7 @@ func (r *reconciler) promote( // The assumption is that controller does not process multiple promotions in one stage // so we are safe from race conditions and can just update the status err = kubeclient.PatchStatus(ctx, r.kargoClient, stage, func(status *kargoapi.StageStatus) { + status.Phase = kargoapi.StagePhaseVerifying status.CurrentPromotion = nil // control-flow Stage history is maintained in Stage controller. // So we only modify history for normal Stages. diff --git a/internal/controller/rollouts/api/v1alpha1/analysis_helpers.go b/internal/controller/rollouts/api/v1alpha1/analysis_helpers.go new file mode 100644 index 000000000..4f60de491 --- /dev/null +++ b/internal/controller/rollouts/api/v1alpha1/analysis_helpers.go @@ -0,0 +1,52 @@ +package v1alpha1 + +import ( + "context" + + "github.com/pkg/errors" + "k8s.io/apimachinery/pkg/types" + "sigs.k8s.io/controller-runtime/pkg/client" +) + +// GetAnalysisTemplate returns a pointer to the AnalysisTemplate resource +// specified by the namespacedName argument. If no such resource is found, nil +// is returned instead. 
+func GetAnalysisTemplate( + ctx context.Context, + c client.Client, + namespacedName types.NamespacedName, +) (*AnalysisTemplate, error) { + at := AnalysisTemplate{} + if err := c.Get(ctx, namespacedName, &at); err != nil { + if err = client.IgnoreNotFound(err); err == nil { + return nil, nil + } + return nil, errors.Wrapf( + err, + "error getting AnalysisTemplate %q in namespace %q", + namespacedName.Name, + namespacedName.Namespace, + ) + } + return &at, nil +} + +func GetAnalysisRun( + ctx context.Context, + c client.Client, + namespacedName types.NamespacedName, +) (*AnalysisRun, error) { + ar := AnalysisRun{} + if err := c.Get(ctx, namespacedName, &ar); err != nil { + if err = client.IgnoreNotFound(err); err == nil { + return nil, nil + } + return nil, errors.Wrapf( + err, + "error getting AnalysisRun %q in namespace %q", + namespacedName.Name, + namespacedName.Namespace, + ) + } + return &ar, nil +} diff --git a/internal/controller/rollouts/api/v1alpha1/analysis_helpers_test.go b/internal/controller/rollouts/api/v1alpha1/analysis_helpers_test.go new file mode 100644 index 000000000..f51d7cb39 --- /dev/null +++ b/internal/controller/rollouts/api/v1alpha1/analysis_helpers_test.go @@ -0,0 +1,115 @@ +package v1alpha1 + +import ( + "context" + "testing" + + "github.com/stretchr/testify/require" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + k8sruntime "k8s.io/apimachinery/pkg/runtime" + "k8s.io/apimachinery/pkg/types" + "sigs.k8s.io/controller-runtime/pkg/client" + "sigs.k8s.io/controller-runtime/pkg/client/fake" +) + +func TestGetAnalysisTemplate(t *testing.T) { + scheme := k8sruntime.NewScheme() + require.NoError(t, SchemeBuilder.AddToScheme(scheme)) + + testCases := []struct { + name string + client client.Client + assertions func(*AnalysisTemplate, error) + }{ + { + name: "not found", + client: fake.NewClientBuilder().WithScheme(scheme).Build(), + assertions: func(template *AnalysisTemplate, err error) { + require.NoError(t, err) + require.Nil(t, template) 
+ }, + }, + + { + name: "found", + client: fake.NewClientBuilder().WithScheme(scheme).WithObjects( + &AnalysisTemplate{ + ObjectMeta: metav1.ObjectMeta{ + Name: "fake-template", + Namespace: "fake-namespace", + }, + }, + ).Build(), + assertions: func(template *AnalysisTemplate, err error) { + require.NoError(t, err) + require.Equal(t, "fake-template", template.Name) + require.Equal(t, "fake-namespace", template.Namespace) + }, + }, + } + + for _, testCase := range testCases { + t.Run(testCase.name, func(t *testing.T) { + template, err := GetAnalysisTemplate( + context.Background(), + testCase.client, + types.NamespacedName{ + Namespace: "fake-namespace", + Name: "fake-template", + }, + ) + testCase.assertions(template, err) + }) + } +} + +func TestGetAnalysisRun(t *testing.T) { + scheme := k8sruntime.NewScheme() + require.NoError(t, SchemeBuilder.AddToScheme(scheme)) + + testCases := []struct { + name string + client client.Client + assertions func(*AnalysisRun, error) + }{ + { + name: "not found", + client: fake.NewClientBuilder().WithScheme(scheme).Build(), + assertions: func(run *AnalysisRun, err error) { + require.NoError(t, err) + require.Nil(t, run) + }, + }, + + { + name: "found", + client: fake.NewClientBuilder().WithScheme(scheme).WithObjects( + &AnalysisRun{ + ObjectMeta: metav1.ObjectMeta{ + Name: "fake-run", + Namespace: "fake-namespace", + }, + }, + ).Build(), + assertions: func(run *AnalysisRun, err error) { + require.NoError(t, err) + require.Equal(t, "fake-run", run.Name) + require.Equal(t, "fake-namespace", run.Namespace) + }, + }, + } + + for _, testCase := range testCases { + t.Run(testCase.name, func(t *testing.T) { + run, err := GetAnalysisRun( + context.Background(), + testCase.client, + types.NamespacedName{ + Namespace: "fake-namespace", + Name: "fake-run", + }, + ) + testCase.assertions(run, err) + }) + } +} diff --git a/internal/controller/rollouts/api/v1alpha1/analysis_types.go b/internal/controller/rollouts/api/v1alpha1/analysis_types.go 
new file mode 100644 index 000000000..02a41460c --- /dev/null +++ b/internal/controller/rollouts/api/v1alpha1/analysis_types.go @@ -0,0 +1,328 @@ +package v1alpha1 + +import ( + "encoding/json" + "time" + + batchv1 "k8s.io/api/batch/v1" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + intstrutil "k8s.io/apimachinery/pkg/util/intstr" +) + +//+kubebuilder:object:root=true + +type AnalysisTemplate struct { + metav1.TypeMeta `json:",inline"` + metav1.ObjectMeta `json:"metadata,omitempty"` + Spec AnalysisTemplateSpec `json:"spec"` +} + +//+kubebuilder:object:root=true + +type AnalysisTemplateList struct { + metav1.TypeMeta `json:",inline"` + metav1.ListMeta `json:"metadata"` + Items []AnalysisTemplate `json:"items"` +} + +type AnalysisTemplateSpec struct { + Metrics []Metric `json:"metrics"` + Args []Argument `json:"args,omitempty"` + DryRun []DryRun `json:"dryRun,omitempty"` + MeasurementRetention []MeasurementRetention `json:"measurementRetention,omitempty"` +} + +type DurationString string + +func (d DurationString) Duration() (time.Duration, error) { + return time.ParseDuration(string(d)) +} + +type Metric struct { + Name string `json:"name"` + Interval DurationString `json:"interval,omitempty"` + InitialDelay DurationString `json:"initialDelay,omitempty"` + Count *intstrutil.IntOrString `json:"count,omitempty"` + SuccessCondition string `json:"successCondition,omitempty"` + FailureCondition string `json:"failureCondition,omitempty"` + FailureLimit *intstrutil.IntOrString `json:"failureLimit,omitempty"` + InconclusiveLimit *intstrutil.IntOrString `json:"inconclusiveLimit,omitempty"` + ConsecutiveErrorLimit *intstrutil.IntOrString `json:"consecutiveErrorLimit,omitempty"` + Provider MetricProvider `json:"provider"` +} + +type DryRun struct { + MetricName string `json:"metricName"` +} + +type MeasurementRetention struct { + MetricName string `json:"metricName"` + Limit int32 `json:"limit"` +} + +type MetricProvider struct { + Prometheus *PrometheusMetric 
`json:"prometheus,omitempty"` + Kayenta *KayentaMetric `json:"kayenta,omitempty"` + Web *WebMetric `json:"web,omitempty"` + Datadog *DatadogMetric `json:"datadog,omitempty"` + Wavefront *WavefrontMetric `json:"wavefront,omitempty"` + NewRelic *NewRelicMetric `json:"newRelic,omitempty"` + Job *JobMetric `json:"job,omitempty"` + CloudWatch *CloudWatchMetric `json:"cloudWatch,omitempty"` + Graphite *GraphiteMetric `json:"graphite,omitempty"` + Influxdb *InfluxdbMetric `json:"influxdb,omitempty"` + SkyWalking *SkyWalkingMetric `json:"skywalking,omitempty"` + Plugin map[string]json.RawMessage `json:"plugin,omitempty"` +} + +type AnalysisPhase string + +const ( + AnalysisPhasePending AnalysisPhase = "Pending" + AnalysisPhaseRunning AnalysisPhase = "Running" + AnalysisPhaseSuccessful AnalysisPhase = "Successful" + AnalysisPhaseFailed AnalysisPhase = "Failed" + AnalysisPhaseError AnalysisPhase = "Error" + AnalysisPhaseInconclusive AnalysisPhase = "Inconclusive" +) + +// Completed returns whether or not the analysis status is considered completed +func (as AnalysisPhase) Completed() bool { + switch as { + case AnalysisPhaseSuccessful, AnalysisPhaseFailed, AnalysisPhaseError, AnalysisPhaseInconclusive: + return true + } + return false +} + +type PrometheusMetric struct { + Address string `json:"address,omitempty"` + Query string `json:"query,omitempty"` + Authentication Authentication `json:"authentication,omitempty"` + Timeout *int64 `json:"timeout,omitempty"` + Insecure bool `json:"insecure,omitempty"` + Headers []WebMetricHeader `json:"headers,omitempty"` +} + +type Authentication struct { + Sigv4 Sigv4Config `json:"sigv4,omitempty"` + OAuth2 OAuth2Config `json:"oauth2,omitempty"` +} + +type OAuth2Config struct { + TokenURL string `json:"tokenUrl,omitempty"` + ClientID string `json:"clientId,omitempty"` + ClientSecret string `json:"clientSecret,omitempty"` + Scopes []string `json:"scopes,omitempty"` +} + +type Sigv4Config struct { + Region string `json:"region,omitempty"` 
+ Profile string `json:"profile,omitempty"` + RoleARN string `json:"roleArn,omitempty"` +} + +type WavefrontMetric struct { + Address string `json:"address,omitempty"` + Query string `json:"query,omitempty"` +} + +type NewRelicMetric struct { + Profile string `json:"profile,omitempty"` + Query string `json:"query"` +} + +type JobMetric struct { + Metadata metav1.ObjectMeta `json:"metadata,omitempty"` + Spec batchv1.JobSpec `json:"spec"` +} + +type GraphiteMetric struct { + Address string `json:"address,omitempty"` + Query string `json:"query,omitempty"` +} + +type InfluxdbMetric struct { + Profile string `json:"profile,omitempty"` + Query string `json:"query,omitempty"` +} + +type CloudWatchMetric struct { + Interval DurationString `json:"interval,omitempty"` + MetricDataQueries []CloudWatchMetricDataQuery `json:"metricDataQueries"` +} + +type CloudWatchMetricDataQuery struct { + Id string `json:"id,omitempty"` + Expression *string `json:"expression,omitempty"` + Label *string `json:"label,omitempty"` + MetricStat *CloudWatchMetricStat `json:"metricStat,omitempty"` + Period *intstrutil.IntOrString `json:"period,omitempty"` + ReturnData *bool `json:"returnData,omitempty"` +} + +type CloudWatchMetricStat struct { + Metric CloudWatchMetricStatMetric `json:"metric,omitempty"` + Period intstrutil.IntOrString `json:"period,omitempty"` + Stat string `json:"stat,omitempty"` + Unit string `json:"unit,omitempty"` +} + +type CloudWatchMetricStatMetric struct { + Dimensions []CloudWatchMetricStatMetricDimension `json:"dimensions,omitempty"` + MetricName string `json:"metricName,omitempty"` + Namespace *string `json:"namespace,omitempty"` +} + +type CloudWatchMetricStatMetricDimension struct { + Name string `json:"name,omitempty"` + Value string `json:"value,omitempty"` +} + +//+kubebuilder:object:root=true +//+kubebuilder:subresource:status + +type AnalysisRun struct { + metav1.TypeMeta `json:",inline"` + metav1.ObjectMeta `json:"metadata,omitempty"` + Spec AnalysisRunSpec 
`json:"spec"` + Status AnalysisRunStatus `json:"status,omitempty"` +} + +//+kubebuilder:object:root=true + +type AnalysisRunList struct { + metav1.TypeMeta `json:",inline"` + metav1.ListMeta `json:"metadata"` + Items []AnalysisRun `json:"items"` +} + +type SkyWalkingMetric struct { + Address string `json:"address,omitempty"` + Query string `json:"query,omitempty"` + Interval DurationString `json:"interval,omitempty"` +} + +type AnalysisRunSpec struct { + Metrics []Metric `json:"metrics"` + Args []Argument `json:"args,omitempty"` + Terminate bool `json:"terminate,omitempty"` + DryRun []DryRun `json:"dryRun,omitempty"` + MeasurementRetention []MeasurementRetention `json:"measurementRetention,omitempty"` +} + +type Argument struct { + Name string `json:"name"` + Value *string `json:"value,omitempty"` + ValueFrom *ValueFrom `json:"valueFrom,omitempty"` +} + +type ValueFrom struct { + SecretKeyRef *SecretKeyRef `json:"secretKeyRef,omitempty"` + FieldRef *FieldRef `json:"fieldRef,omitempty"` +} + +type SecretKeyRef struct { + Name string `json:"name"` + Key string `json:"key"` +} + +type AnalysisRunStatus struct { + Phase AnalysisPhase `json:"phase"` + Message string `json:"message,omitempty"` + MetricResults []MetricResult `json:"metricResults,omitempty"` + StartedAt *metav1.Time `json:"startedAt,omitempty"` + RunSummary RunSummary `json:"runSummary,omitempty"` + DryRunSummary *RunSummary `json:"dryRunSummary,omitempty"` +} + +type RunSummary struct { + Count int32 `json:"count,omitempty"` + Successful int32 `json:"successful,omitempty"` + Failed int32 `json:"failed,omitempty"` + Inconclusive int32 `json:"inconclusive,omitempty"` + Error int32 `json:"error,omitempty"` +} + +type MetricResult struct { + Name string `json:"name"` + Phase AnalysisPhase `json:"phase"` + Measurements []Measurement `json:"measurements,omitempty"` + Message string `json:"message,omitempty"` + Count int32 `json:"count,omitempty"` + Successful int32 `json:"successful,omitempty"` + Failed int32 
`json:"failed,omitempty"` + Inconclusive int32 `json:"inconclusive,omitempty"` + Error int32 `json:"error,omitempty"` + ConsecutiveError int32 `json:"consecutiveError,omitempty"` + DryRun bool `json:"dryRun,omitempty"` + Metadata map[string]string `json:"metadata,omitempty"` +} + +type Measurement struct { + Phase AnalysisPhase `json:"phase"` + Message string `json:"message,omitempty"` + StartedAt *metav1.Time `json:"startedAt,omitempty"` + FinishedAt *metav1.Time `json:"finishedAt,omitempty"` + Value string `json:"value,omitempty"` + Metadata map[string]string `json:"metadata,omitempty"` + ResumeAt *metav1.Time `json:"resumeAt,omitempty"` +} + +type KayentaMetric struct { + Address string `json:"address"` + Application string `json:"application"` + CanaryConfigName string `json:"canaryConfigName"` + MetricsAccountName string `json:"metricsAccountName"` + ConfigurationAccountName string `json:"configurationAccountName"` + StorageAccountName string `json:"storageAccountName"` + Threshold KayentaThreshold `json:"threshold"` + Scopes []KayentaScope `json:"scopes"` +} + +type KayentaThreshold struct { + Pass int64 `json:"pass"` + Marginal int64 `json:"marginal"` +} + +type KayentaScope struct { + Name string `json:"name"` + ControlScope ScopeDetail `json:"controlScope"` + ExperimentScope ScopeDetail `json:"experimentScope"` +} + +type ScopeDetail struct { + Scope string `json:"scope"` + Region string `json:"region"` + Step int64 `json:"step"` + Start string `json:"start"` + End string `json:"end"` +} + +type WebMetric struct { + Method WebMetricMethod `json:"method,omitempty"` + // URL is the address of the web metric + URL string `json:"url"` + Headers []WebMetricHeader `json:"headers,omitempty"` + Body string `json:"body,omitempty"` + TimeoutSeconds int64 `json:"timeoutSeconds,omitempty"` + JSONPath string `json:"jsonPath,omitempty"` + Insecure bool `json:"insecure,omitempty"` + JSONBody json.RawMessage `json:"jsonBody,omitempty"` + Authentication Authentication 
`json:"authentication,omitempty"` +} + +type WebMetricMethod string + +type WebMetricHeader struct { + Key string `json:"key"` + Value string `json:"value"` +} + +type DatadogMetric struct { + Interval DurationString `json:"interval,omitempty"` + Query string `json:"query,omitempty"` + Queries map[string]string `json:"queries,omitempty"` + Formula string `json:"formula,omitempty"` + ApiVersion string `json:"apiVersion,omitempty"` +} diff --git a/internal/controller/rollouts/api/v1alpha1/doc.go b/internal/controller/rollouts/api/v1alpha1/doc.go new file mode 100644 index 000000000..5912a1b97 --- /dev/null +++ b/internal/controller/rollouts/api/v1alpha1/doc.go @@ -0,0 +1,10 @@ +package v1alpha1 + +// This package reproduces just enough of +// github.com/argoproj/argo-rollouts/pkg/apis/rollouts/v1alpha1 to support Kargo +// without having to incur undesired dependencies on Argo Rollouts, Argo CD, +// GitOps Engine, etc., since these have transitive dependencies on Kubernetes +// and can sometimes hold us back from upgrading important Kubernetes packages. + +// TODO: KR: Once Analysis is fully-integrated into Kargo, many of the fields +// that we don't use can be removed from types in this package. 
diff --git a/internal/controller/rollouts/api/v1alpha1/groupversion_info.go b/internal/controller/rollouts/api/v1alpha1/groupversion_info.go new file mode 100644 index 000000000..bced3e888 --- /dev/null +++ b/internal/controller/rollouts/api/v1alpha1/groupversion_info.go @@ -0,0 +1,32 @@ +// +kubebuilder:object:generate=true +// +groupName=argoproj.io +package v1alpha1 + +import ( + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + "k8s.io/apimachinery/pkg/runtime" + "k8s.io/apimachinery/pkg/runtime/schema" +) + +var ( + GroupVersion = schema.GroupVersion{ + Group: "argoproj.io", + Version: "v1alpha1", + } + + SchemeBuilder = runtime.NewSchemeBuilder(addKnownTypes) + + AddToScheme = SchemeBuilder.AddToScheme +) + +func addKnownTypes(scheme *runtime.Scheme) error { + scheme.AddKnownTypes( + GroupVersion, + &AnalysisTemplate{}, + &AnalysisTemplateList{}, + &AnalysisRun{}, + &AnalysisRunList{}, + ) + metav1.AddToGroupVersion(scheme, GroupVersion) + return nil +} diff --git a/internal/controller/rollouts/api/v1alpha1/types.go b/internal/controller/rollouts/api/v1alpha1/types.go new file mode 100644 index 000000000..e33775b8a --- /dev/null +++ b/internal/controller/rollouts/api/v1alpha1/types.go @@ -0,0 +1,6 @@ +package v1alpha1 + +type FieldRef struct { + // Required: Path of the field to select in the specified API version + FieldPath string `json:"fieldPath" protobuf:"bytes,1,opt,name=fieldPath"` +} diff --git a/internal/controller/rollouts/api/v1alpha1/zz_generated.deepcopy.go b/internal/controller/rollouts/api/v1alpha1/zz_generated.deepcopy.go new file mode 100644 index 000000000..c6017630f --- /dev/null +++ b/internal/controller/rollouts/api/v1alpha1/zz_generated.deepcopy.go @@ -0,0 +1,968 @@ +//go:build !ignore_autogenerated + +// Code generated by controller-gen. DO NOT EDIT. 
+ +package v1alpha1 + +import ( + "encoding/json" + "k8s.io/apimachinery/pkg/runtime" + "k8s.io/apimachinery/pkg/util/intstr" +) + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *AnalysisRun) DeepCopyInto(out *AnalysisRun) { + *out = *in + out.TypeMeta = in.TypeMeta + in.ObjectMeta.DeepCopyInto(&out.ObjectMeta) + in.Spec.DeepCopyInto(&out.Spec) + in.Status.DeepCopyInto(&out.Status) +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AnalysisRun. +func (in *AnalysisRun) DeepCopy() *AnalysisRun { + if in == nil { + return nil + } + out := new(AnalysisRun) + in.DeepCopyInto(out) + return out +} + +// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object. +func (in *AnalysisRun) DeepCopyObject() runtime.Object { + if c := in.DeepCopy(); c != nil { + return c + } + return nil +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *AnalysisRunList) DeepCopyInto(out *AnalysisRunList) { + *out = *in + out.TypeMeta = in.TypeMeta + in.ListMeta.DeepCopyInto(&out.ListMeta) + if in.Items != nil { + in, out := &in.Items, &out.Items + *out = make([]AnalysisRun, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AnalysisRunList. +func (in *AnalysisRunList) DeepCopy() *AnalysisRunList { + if in == nil { + return nil + } + out := new(AnalysisRunList) + in.DeepCopyInto(out) + return out +} + +// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object. 
+func (in *AnalysisRunList) DeepCopyObject() runtime.Object { + if c := in.DeepCopy(); c != nil { + return c + } + return nil +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *AnalysisRunSpec) DeepCopyInto(out *AnalysisRunSpec) { + *out = *in + if in.Metrics != nil { + in, out := &in.Metrics, &out.Metrics + *out = make([]Metric, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + if in.Args != nil { + in, out := &in.Args, &out.Args + *out = make([]Argument, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + if in.DryRun != nil { + in, out := &in.DryRun, &out.DryRun + *out = make([]DryRun, len(*in)) + copy(*out, *in) + } + if in.MeasurementRetention != nil { + in, out := &in.MeasurementRetention, &out.MeasurementRetention + *out = make([]MeasurementRetention, len(*in)) + copy(*out, *in) + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AnalysisRunSpec. +func (in *AnalysisRunSpec) DeepCopy() *AnalysisRunSpec { + if in == nil { + return nil + } + out := new(AnalysisRunSpec) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *AnalysisRunStatus) DeepCopyInto(out *AnalysisRunStatus) { + *out = *in + if in.MetricResults != nil { + in, out := &in.MetricResults, &out.MetricResults + *out = make([]MetricResult, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + if in.StartedAt != nil { + in, out := &in.StartedAt, &out.StartedAt + *out = (*in).DeepCopy() + } + out.RunSummary = in.RunSummary + if in.DryRunSummary != nil { + in, out := &in.DryRunSummary, &out.DryRunSummary + *out = new(RunSummary) + **out = **in + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AnalysisRunStatus. 
+func (in *AnalysisRunStatus) DeepCopy() *AnalysisRunStatus { + if in == nil { + return nil + } + out := new(AnalysisRunStatus) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *AnalysisTemplate) DeepCopyInto(out *AnalysisTemplate) { + *out = *in + out.TypeMeta = in.TypeMeta + in.ObjectMeta.DeepCopyInto(&out.ObjectMeta) + in.Spec.DeepCopyInto(&out.Spec) +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AnalysisTemplate. +func (in *AnalysisTemplate) DeepCopy() *AnalysisTemplate { + if in == nil { + return nil + } + out := new(AnalysisTemplate) + in.DeepCopyInto(out) + return out +} + +// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object. +func (in *AnalysisTemplate) DeepCopyObject() runtime.Object { + if c := in.DeepCopy(); c != nil { + return c + } + return nil +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *AnalysisTemplateList) DeepCopyInto(out *AnalysisTemplateList) { + *out = *in + out.TypeMeta = in.TypeMeta + in.ListMeta.DeepCopyInto(&out.ListMeta) + if in.Items != nil { + in, out := &in.Items, &out.Items + *out = make([]AnalysisTemplate, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AnalysisTemplateList. +func (in *AnalysisTemplateList) DeepCopy() *AnalysisTemplateList { + if in == nil { + return nil + } + out := new(AnalysisTemplateList) + in.DeepCopyInto(out) + return out +} + +// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object. 
+func (in *AnalysisTemplateList) DeepCopyObject() runtime.Object { + if c := in.DeepCopy(); c != nil { + return c + } + return nil +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *AnalysisTemplateSpec) DeepCopyInto(out *AnalysisTemplateSpec) { + *out = *in + if in.Metrics != nil { + in, out := &in.Metrics, &out.Metrics + *out = make([]Metric, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + if in.Args != nil { + in, out := &in.Args, &out.Args + *out = make([]Argument, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + if in.DryRun != nil { + in, out := &in.DryRun, &out.DryRun + *out = make([]DryRun, len(*in)) + copy(*out, *in) + } + if in.MeasurementRetention != nil { + in, out := &in.MeasurementRetention, &out.MeasurementRetention + *out = make([]MeasurementRetention, len(*in)) + copy(*out, *in) + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AnalysisTemplateSpec. +func (in *AnalysisTemplateSpec) DeepCopy() *AnalysisTemplateSpec { + if in == nil { + return nil + } + out := new(AnalysisTemplateSpec) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *Argument) DeepCopyInto(out *Argument) { + *out = *in + if in.Value != nil { + in, out := &in.Value, &out.Value + *out = new(string) + **out = **in + } + if in.ValueFrom != nil { + in, out := &in.ValueFrom, &out.ValueFrom + *out = new(ValueFrom) + (*in).DeepCopyInto(*out) + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Argument. +func (in *Argument) DeepCopy() *Argument { + if in == nil { + return nil + } + out := new(Argument) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. 
in must be non-nil. +func (in *Authentication) DeepCopyInto(out *Authentication) { + *out = *in + out.Sigv4 = in.Sigv4 + in.OAuth2.DeepCopyInto(&out.OAuth2) +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Authentication. +func (in *Authentication) DeepCopy() *Authentication { + if in == nil { + return nil + } + out := new(Authentication) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *CloudWatchMetric) DeepCopyInto(out *CloudWatchMetric) { + *out = *in + if in.MetricDataQueries != nil { + in, out := &in.MetricDataQueries, &out.MetricDataQueries + *out = make([]CloudWatchMetricDataQuery, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new CloudWatchMetric. +func (in *CloudWatchMetric) DeepCopy() *CloudWatchMetric { + if in == nil { + return nil + } + out := new(CloudWatchMetric) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *CloudWatchMetricDataQuery) DeepCopyInto(out *CloudWatchMetricDataQuery) { + *out = *in + if in.Expression != nil { + in, out := &in.Expression, &out.Expression + *out = new(string) + **out = **in + } + if in.Label != nil { + in, out := &in.Label, &out.Label + *out = new(string) + **out = **in + } + if in.MetricStat != nil { + in, out := &in.MetricStat, &out.MetricStat + *out = new(CloudWatchMetricStat) + (*in).DeepCopyInto(*out) + } + if in.Period != nil { + in, out := &in.Period, &out.Period + *out = new(intstr.IntOrString) + **out = **in + } + if in.ReturnData != nil { + in, out := &in.ReturnData, &out.ReturnData + *out = new(bool) + **out = **in + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new CloudWatchMetricDataQuery. +func (in *CloudWatchMetricDataQuery) DeepCopy() *CloudWatchMetricDataQuery { + if in == nil { + return nil + } + out := new(CloudWatchMetricDataQuery) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *CloudWatchMetricStat) DeepCopyInto(out *CloudWatchMetricStat) { + *out = *in + in.Metric.DeepCopyInto(&out.Metric) + out.Period = in.Period +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new CloudWatchMetricStat. +func (in *CloudWatchMetricStat) DeepCopy() *CloudWatchMetricStat { + if in == nil { + return nil + } + out := new(CloudWatchMetricStat) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *CloudWatchMetricStatMetric) DeepCopyInto(out *CloudWatchMetricStatMetric) { + *out = *in + if in.Dimensions != nil { + in, out := &in.Dimensions, &out.Dimensions + *out = make([]CloudWatchMetricStatMetricDimension, len(*in)) + copy(*out, *in) + } + if in.Namespace != nil { + in, out := &in.Namespace, &out.Namespace + *out = new(string) + **out = **in + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new CloudWatchMetricStatMetric. +func (in *CloudWatchMetricStatMetric) DeepCopy() *CloudWatchMetricStatMetric { + if in == nil { + return nil + } + out := new(CloudWatchMetricStatMetric) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *CloudWatchMetricStatMetricDimension) DeepCopyInto(out *CloudWatchMetricStatMetricDimension) { + *out = *in +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new CloudWatchMetricStatMetricDimension. +func (in *CloudWatchMetricStatMetricDimension) DeepCopy() *CloudWatchMetricStatMetricDimension { + if in == nil { + return nil + } + out := new(CloudWatchMetricStatMetricDimension) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *DatadogMetric) DeepCopyInto(out *DatadogMetric) { + *out = *in + if in.Queries != nil { + in, out := &in.Queries, &out.Queries + *out = make(map[string]string, len(*in)) + for key, val := range *in { + (*out)[key] = val + } + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatadogMetric. +func (in *DatadogMetric) DeepCopy() *DatadogMetric { + if in == nil { + return nil + } + out := new(DatadogMetric) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. 
in must be non-nil. +func (in *DryRun) DeepCopyInto(out *DryRun) { + *out = *in +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DryRun. +func (in *DryRun) DeepCopy() *DryRun { + if in == nil { + return nil + } + out := new(DryRun) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *FieldRef) DeepCopyInto(out *FieldRef) { + *out = *in +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FieldRef. +func (in *FieldRef) DeepCopy() *FieldRef { + if in == nil { + return nil + } + out := new(FieldRef) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *GraphiteMetric) DeepCopyInto(out *GraphiteMetric) { + *out = *in +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GraphiteMetric. +func (in *GraphiteMetric) DeepCopy() *GraphiteMetric { + if in == nil { + return nil + } + out := new(GraphiteMetric) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *InfluxdbMetric) DeepCopyInto(out *InfluxdbMetric) { + *out = *in +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new InfluxdbMetric. +func (in *InfluxdbMetric) DeepCopy() *InfluxdbMetric { + if in == nil { + return nil + } + out := new(InfluxdbMetric) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *JobMetric) DeepCopyInto(out *JobMetric) { + *out = *in + in.Metadata.DeepCopyInto(&out.Metadata) + in.Spec.DeepCopyInto(&out.Spec) +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobMetric. +func (in *JobMetric) DeepCopy() *JobMetric { + if in == nil { + return nil + } + out := new(JobMetric) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *KayentaMetric) DeepCopyInto(out *KayentaMetric) { + *out = *in + out.Threshold = in.Threshold + if in.Scopes != nil { + in, out := &in.Scopes, &out.Scopes + *out = make([]KayentaScope, len(*in)) + copy(*out, *in) + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new KayentaMetric. +func (in *KayentaMetric) DeepCopy() *KayentaMetric { + if in == nil { + return nil + } + out := new(KayentaMetric) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *KayentaScope) DeepCopyInto(out *KayentaScope) { + *out = *in + out.ControlScope = in.ControlScope + out.ExperimentScope = in.ExperimentScope +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new KayentaScope. +func (in *KayentaScope) DeepCopy() *KayentaScope { + if in == nil { + return nil + } + out := new(KayentaScope) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *KayentaThreshold) DeepCopyInto(out *KayentaThreshold) { + *out = *in +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new KayentaThreshold. 
+func (in *KayentaThreshold) DeepCopy() *KayentaThreshold { + if in == nil { + return nil + } + out := new(KayentaThreshold) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *Measurement) DeepCopyInto(out *Measurement) { + *out = *in + if in.StartedAt != nil { + in, out := &in.StartedAt, &out.StartedAt + *out = (*in).DeepCopy() + } + if in.FinishedAt != nil { + in, out := &in.FinishedAt, &out.FinishedAt + *out = (*in).DeepCopy() + } + if in.Metadata != nil { + in, out := &in.Metadata, &out.Metadata + *out = make(map[string]string, len(*in)) + for key, val := range *in { + (*out)[key] = val + } + } + if in.ResumeAt != nil { + in, out := &in.ResumeAt, &out.ResumeAt + *out = (*in).DeepCopy() + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Measurement. +func (in *Measurement) DeepCopy() *Measurement { + if in == nil { + return nil + } + out := new(Measurement) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *MeasurementRetention) DeepCopyInto(out *MeasurementRetention) { + *out = *in +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MeasurementRetention. +func (in *MeasurementRetention) DeepCopy() *MeasurementRetention { + if in == nil { + return nil + } + out := new(MeasurementRetention) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *Metric) DeepCopyInto(out *Metric) { + *out = *in + if in.Count != nil { + in, out := &in.Count, &out.Count + *out = new(intstr.IntOrString) + **out = **in + } + if in.FailureLimit != nil { + in, out := &in.FailureLimit, &out.FailureLimit + *out = new(intstr.IntOrString) + **out = **in + } + if in.InconclusiveLimit != nil { + in, out := &in.InconclusiveLimit, &out.InconclusiveLimit + *out = new(intstr.IntOrString) + **out = **in + } + if in.ConsecutiveErrorLimit != nil { + in, out := &in.ConsecutiveErrorLimit, &out.ConsecutiveErrorLimit + *out = new(intstr.IntOrString) + **out = **in + } + in.Provider.DeepCopyInto(&out.Provider) +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Metric. +func (in *Metric) DeepCopy() *Metric { + if in == nil { + return nil + } + out := new(Metric) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *MetricProvider) DeepCopyInto(out *MetricProvider) { + *out = *in + if in.Prometheus != nil { + in, out := &in.Prometheus, &out.Prometheus + *out = new(PrometheusMetric) + (*in).DeepCopyInto(*out) + } + if in.Kayenta != nil { + in, out := &in.Kayenta, &out.Kayenta + *out = new(KayentaMetric) + (*in).DeepCopyInto(*out) + } + if in.Web != nil { + in, out := &in.Web, &out.Web + *out = new(WebMetric) + (*in).DeepCopyInto(*out) + } + if in.Datadog != nil { + in, out := &in.Datadog, &out.Datadog + *out = new(DatadogMetric) + (*in).DeepCopyInto(*out) + } + if in.Wavefront != nil { + in, out := &in.Wavefront, &out.Wavefront + *out = new(WavefrontMetric) + **out = **in + } + if in.NewRelic != nil { + in, out := &in.NewRelic, &out.NewRelic + *out = new(NewRelicMetric) + **out = **in + } + if in.Job != nil { + in, out := &in.Job, &out.Job + *out = new(JobMetric) + (*in).DeepCopyInto(*out) + } + if in.CloudWatch != nil { + in, out := &in.CloudWatch, &out.CloudWatch + *out = 
new(CloudWatchMetric) + (*in).DeepCopyInto(*out) + } + if in.Graphite != nil { + in, out := &in.Graphite, &out.Graphite + *out = new(GraphiteMetric) + **out = **in + } + if in.Influxdb != nil { + in, out := &in.Influxdb, &out.Influxdb + *out = new(InfluxdbMetric) + **out = **in + } + if in.SkyWalking != nil { + in, out := &in.SkyWalking, &out.SkyWalking + *out = new(SkyWalkingMetric) + **out = **in + } + if in.Plugin != nil { + in, out := &in.Plugin, &out.Plugin + *out = make(map[string]json.RawMessage, len(*in)) + for key, val := range *in { + var outVal []byte + if val == nil { + (*out)[key] = nil + } else { + inVal := (*in)[key] + in, out := &inVal, &outVal + *out = make(json.RawMessage, len(*in)) + copy(*out, *in) + } + (*out)[key] = outVal + } + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MetricProvider. +func (in *MetricProvider) DeepCopy() *MetricProvider { + if in == nil { + return nil + } + out := new(MetricProvider) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *MetricResult) DeepCopyInto(out *MetricResult) { + *out = *in + if in.Measurements != nil { + in, out := &in.Measurements, &out.Measurements + *out = make([]Measurement, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + if in.Metadata != nil { + in, out := &in.Metadata, &out.Metadata + *out = make(map[string]string, len(*in)) + for key, val := range *in { + (*out)[key] = val + } + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MetricResult. +func (in *MetricResult) DeepCopy() *MetricResult { + if in == nil { + return nil + } + out := new(MetricResult) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *NewRelicMetric) DeepCopyInto(out *NewRelicMetric) { + *out = *in +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new NewRelicMetric. +func (in *NewRelicMetric) DeepCopy() *NewRelicMetric { + if in == nil { + return nil + } + out := new(NewRelicMetric) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *OAuth2Config) DeepCopyInto(out *OAuth2Config) { + *out = *in + if in.Scopes != nil { + in, out := &in.Scopes, &out.Scopes + *out = make([]string, len(*in)) + copy(*out, *in) + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new OAuth2Config. +func (in *OAuth2Config) DeepCopy() *OAuth2Config { + if in == nil { + return nil + } + out := new(OAuth2Config) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *PrometheusMetric) DeepCopyInto(out *PrometheusMetric) { + *out = *in + in.Authentication.DeepCopyInto(&out.Authentication) + if in.Timeout != nil { + in, out := &in.Timeout, &out.Timeout + *out = new(int64) + **out = **in + } + if in.Headers != nil { + in, out := &in.Headers, &out.Headers + *out = make([]WebMetricHeader, len(*in)) + copy(*out, *in) + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PrometheusMetric. +func (in *PrometheusMetric) DeepCopy() *PrometheusMetric { + if in == nil { + return nil + } + out := new(PrometheusMetric) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *RunSummary) DeepCopyInto(out *RunSummary) { + *out = *in +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RunSummary. 
+func (in *RunSummary) DeepCopy() *RunSummary { + if in == nil { + return nil + } + out := new(RunSummary) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *ScopeDetail) DeepCopyInto(out *ScopeDetail) { + *out = *in +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ScopeDetail. +func (in *ScopeDetail) DeepCopy() *ScopeDetail { + if in == nil { + return nil + } + out := new(ScopeDetail) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *SecretKeyRef) DeepCopyInto(out *SecretKeyRef) { + *out = *in +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SecretKeyRef. +func (in *SecretKeyRef) DeepCopy() *SecretKeyRef { + if in == nil { + return nil + } + out := new(SecretKeyRef) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *Sigv4Config) DeepCopyInto(out *Sigv4Config) { + *out = *in +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Sigv4Config. +func (in *Sigv4Config) DeepCopy() *Sigv4Config { + if in == nil { + return nil + } + out := new(Sigv4Config) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *SkyWalkingMetric) DeepCopyInto(out *SkyWalkingMetric) { + *out = *in +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SkyWalkingMetric. 
+func (in *SkyWalkingMetric) DeepCopy() *SkyWalkingMetric { + if in == nil { + return nil + } + out := new(SkyWalkingMetric) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *ValueFrom) DeepCopyInto(out *ValueFrom) { + *out = *in + if in.SecretKeyRef != nil { + in, out := &in.SecretKeyRef, &out.SecretKeyRef + *out = new(SecretKeyRef) + **out = **in + } + if in.FieldRef != nil { + in, out := &in.FieldRef, &out.FieldRef + *out = new(FieldRef) + **out = **in + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ValueFrom. +func (in *ValueFrom) DeepCopy() *ValueFrom { + if in == nil { + return nil + } + out := new(ValueFrom) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *WavefrontMetric) DeepCopyInto(out *WavefrontMetric) { + *out = *in +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WavefrontMetric. +func (in *WavefrontMetric) DeepCopy() *WavefrontMetric { + if in == nil { + return nil + } + out := new(WavefrontMetric) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *WebMetric) DeepCopyInto(out *WebMetric) { + *out = *in + if in.Headers != nil { + in, out := &in.Headers, &out.Headers + *out = make([]WebMetricHeader, len(*in)) + copy(*out, *in) + } + if in.JSONBody != nil { + in, out := &in.JSONBody, &out.JSONBody + *out = make(json.RawMessage, len(*in)) + copy(*out, *in) + } + in.Authentication.DeepCopyInto(&out.Authentication) +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WebMetric. 
+func (in *WebMetric) DeepCopy() *WebMetric { + if in == nil { + return nil + } + out := new(WebMetric) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *WebMetricHeader) DeepCopyInto(out *WebMetricHeader) { + *out = *in +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WebMetricHeader. +func (in *WebMetricHeader) DeepCopy() *WebMetricHeader { + if in == nil { + return nil + } + out := new(WebMetricHeader) + in.DeepCopyInto(out) + return out +} diff --git a/internal/controller/stages/health.go b/internal/controller/stages/health.go index a1539901b..45d144181 100644 --- a/internal/controller/stages/health.go +++ b/internal/controller/stages/health.go @@ -32,7 +32,7 @@ func (r *reconciler) checkHealth( app, err := r.getArgoCDAppFn( ctx, - r.argoClient, + r.argocdClient, updates.AppNamespaceOrDefault(), updates.AppName, ) diff --git a/internal/controller/stages/stages.go b/internal/controller/stages/stages.go index 9c0889f93..11a07bb76 100644 --- a/internal/controller/stages/stages.go +++ b/internal/controller/stages/stages.go @@ -20,6 +20,7 @@ import ( kargoapi "github.com/akuity/kargo/api/v1alpha1" "github.com/akuity/kargo/internal/controller" argocd "github.com/akuity/kargo/internal/controller/argocd/api/v1alpha1" + rollouts "github.com/akuity/kargo/internal/controller/rollouts/api/v1alpha1" "github.com/akuity/kargo/internal/kargo" "github.com/akuity/kargo/internal/kubeclient" "github.com/akuity/kargo/internal/logging" @@ -27,8 +28,11 @@ import ( // reconciler reconciles Stage resources. 
type reconciler struct { - kargoClient client.Client - argoClient client.Client + kargoClient client.Client + argocdClient client.Client + rolloutsClient client.Client + + shardName string // The following behaviors are overridable for testing purposes: @@ -63,6 +67,45 @@ type reconciler struct { // Freight verification: + startVerificationFn func( + context.Context, + *kargoapi.Stage, + ) (*kargoapi.VerificationInfo, error) + + getVerificationInfoFn func( + context.Context, + *kargoapi.Stage, + ) (*kargoapi.VerificationInfo, error) + + getAnalysisTemplateFn func( + context.Context, + client.Client, + types.NamespacedName, + ) (*rollouts.AnalysisTemplate, error) + + listAnalysisRunsFn func( + context.Context, + client.ObjectList, + ...client.ListOption, + ) error + + buildAnalysisRunFn func( + stage *kargoapi.Stage, + templates []*rollouts.AnalysisTemplate, + ) (*rollouts.AnalysisRun, error) + + createAnalysisRunFn func( + context.Context, + client.Object, + ...client.CreateOption, + ) error + + getAnalysisRunFn func( + context.Context, + client.Client, + types.NamespacedName, + ) (*rollouts.AnalysisRun, error) + getFreightFn func( context.Context, client.Client, @@ -146,7 +189,8 @@ type reconciler struct { func SetupReconcilerWithManager( ctx context.Context, kargoMgr manager.Manager, - argoMgr manager.Manager, + argocdMgr manager.Manager, + rolloutsMgr manager.Manager, shardName string, ) error { // Index Promotions in non-terminal states by Stage @@ -222,7 +266,14 @@ func SetupReconcilerWithManager( WithEventFilter(shardPredicate). WithEventFilter(kargo.IgnoreClearRefreshUpdates{}). WithOptions(controller.CommonOptions()). 
- Build(newReconciler(kargoMgr.GetClient(), argoMgr.GetClient())) + Build( + newReconciler( + kargoMgr.GetClient(), + argocdMgr.GetClient(), + rolloutsMgr.GetClient(), + shardName, + ), + ) if err != nil { return errors.Wrap(err, "error building Stage reconciler") } @@ -264,10 +315,17 @@ func SetupReconcilerWithManager( return nil } -func newReconciler(kargoClient, argoClient client.Client) *reconciler { +func newReconciler( + kargoClient client.Client, + argocdClient client.Client, + rolloutsClient client.Client, + shardName string, +) *reconciler { r := &reconciler{ - kargoClient: kargoClient, - argoClient: argoClient, + kargoClient: kargoClient, + argocdClient: argocdClient, + rolloutsClient: rolloutsClient, + shardName: shardName, } // The following default behaviors are overridable for testing purposes: // Loop guard: @@ -277,6 +335,13 @@ func newReconciler(kargoClient, argoClient client.Client) *reconciler { r.checkHealthFn = r.checkHealth r.getArgoCDAppFn = argocd.GetApplication // Freight verification: + r.startVerificationFn = r.startVerification + r.getVerificationInfoFn = r.getVerificationInfo + r.getAnalysisTemplateFn = rollouts.GetAnalysisTemplate + r.listAnalysisRunsFn = r.kargoClient.List + r.buildAnalysisRunFn = r.buildAnalysisRun + r.createAnalysisRunFn = r.rolloutsClient.Create + r.getAnalysisRunFn = rollouts.GetAnalysisRun r.getFreightFn = kargoapi.GetFreight r.verifyFreightInStageFn = r.verifyFreightInStage r.patchFreightStatusFn = r.patchFreightStatus @@ -377,6 +442,7 @@ func (r *reconciler) syncControlFlowStage( status := *stage.Status.DeepCopy() status.ObservedGeneration = stage.Generation status.Health = nil // Reset health + status.Phase = kargoapi.StagePhaseNotApplicable status.CurrentPromotion = nil // A Stage without promotion mechanisms shouldn't have a currentFreight. 
Make @@ -488,9 +554,11 @@ func (r *reconciler) syncNormalStage( status.CurrentPromotion = nil if status.CurrentFreight == nil { - logger.Debug("Stage has no current Freight; no health checks to perform") + status.Phase = kargoapi.StagePhaseNotApplicable + logger.Debug( + "Stage has no current Freight; no health checks or verification to perform", + ) } else { - // Check health and mark current Freight as verified in Stage if applicable freightLogger := logger.WithField("freight", status.CurrentFreight.ID) // Check health @@ -506,9 +574,61 @@ func (r *reconciler) syncNormalStage( freightLogger.Debug("Stage health deemed not applicable") } - // If health is not applicable or healthy, mark the current Freight as - // verified in this Stage - if status.Health == nil || status.Health.Status == kargoapi.HealthStateHealthy { + // Initiate or follow-up on verification if required + if status.Phase == kargoapi.StagePhaseVerifying && stage.Spec.Verification != nil { + if status.CurrentFreight.VerificationInfo == nil { + if status.Health == nil || status.Health.Status == kargoapi.HealthStateHealthy { + // Start verification + verInfo, err := r.startVerificationFn(ctx, stage) + if err != nil { + return status, errors.Wrapf( + err, + "error starting verification process for Stage %q and Freight %q in namespace %q", + stage.Name, + status.CurrentFreight.ID, + stage.Namespace, + ) + } + status.CurrentFreight.VerificationInfo = verInfo + } + } else { + log.Debug("checking verification results") + verInfo, err := r.getVerificationInfoFn(ctx, stage) + if err != nil { + return status, errors.Wrapf( + err, + "error getting verification result for Stage %q and Freight %q in namespace %q", + stage.Name, + status.CurrentFreight.ID, + stage.Namespace, + ) + } + status.CurrentFreight.VerificationInfo = verInfo + switch rollouts.AnalysisPhase(status.CurrentFreight.VerificationInfo.AnalysisRun.Phase) { + case rollouts.AnalysisPhasePending: + log.Debug("verification is pending") + case 
rollouts.AnalysisPhaseRunning: + log.Debug("verification is running") + case rollouts.AnalysisPhaseSuccessful, + rollouts.AnalysisPhaseFailed, + rollouts.AnalysisPhaseError, + rollouts.AnalysisPhaseInconclusive: + // Verification is complete + status.Phase = kargoapi.StagePhaseSteady + log.Debug("verification is complete") + } + } + } + + // If health is not applicable or healthy + // AND + // Verification is not applicable or successful + // THEN + // Mark the Freight as verified in this Stage + if (status.Health == nil || status.Health.Status == kargoapi.HealthStateHealthy) && + (stage.Spec.Verification == nil || + (status.CurrentFreight.VerificationInfo != nil && + status.CurrentFreight.VerificationInfo.AnalysisRun.Phase == string(rollouts.AnalysisPhaseSuccessful))) { if err := r.verifyFreightInStageFn( ctx, stage.Namespace, diff --git a/internal/controller/stages/stages_test.go b/internal/controller/stages/stages_test.go index ff49ef9ec..37c6503ac 100644 --- a/internal/controller/stages/stages_test.go +++ b/internal/controller/stages/stages_test.go @@ -13,6 +13,7 @@ import ( "sigs.k8s.io/controller-runtime/pkg/client/fake" kargoapi "github.com/akuity/kargo/api/v1alpha1" + rollouts "github.com/akuity/kargo/internal/controller/rollouts/api/v1alpha1" ) func TestNewReconciler(t *testing.T) { @@ -20,9 +21,11 @@ func TestNewReconciler(t *testing.T) { e := newReconciler( kubeClient, kubeClient, + kubeClient, + "", ) require.NotNil(t, e.kargoClient) - require.NotNil(t, e.argoClient) + require.NotNil(t, e.argocdClient) // Assert that all overridable behaviors were initialized to a default: // Loop guard: require.NotNil(t, e.hasNonTerminalPromotionsFn) @@ -31,6 +34,13 @@ func TestNewReconciler(t *testing.T) { require.NotNil(t, e.checkHealthFn) require.NotNil(t, e.getArgoCDAppFn) // Freight verification: + require.NotNil(t, e.startVerificationFn) + require.NotNil(t, e.getVerificationInfoFn) + require.NotNil(t, e.getAnalysisTemplateFn) + require.NotNil(t, 
e.listAnalysisRunsFn) + require.NotNil(t, e.buildAnalysisRunFn) + require.NotNil(t, e.createAnalysisRunFn) + require.NotNil(t, e.getAnalysisRunFn) require.NotNil(t, e.getFreightFn) require.NotNil(t, e.verifyFreightInStageFn) require.NotNil(t, e.patchFreightStatusFn) @@ -66,6 +76,9 @@ func TestSyncControlFlowStage(t *testing.T) { Warehouse: "fake-warehouse", }, }, + Status: kargoapi.StageStatus{ + Phase: kargoapi.StagePhaseNotApplicable, + }, }, reconciler: &reconciler{ listFreightFn: func( @@ -98,6 +111,9 @@ func TestSyncControlFlowStage(t *testing.T) { }, }, }, + Status: kargoapi.StageStatus{ + Phase: kargoapi.StagePhaseNotApplicable, + }, }, reconciler: &reconciler{ getAllVerifiedFreightFn: func( @@ -132,6 +148,9 @@ func TestSyncControlFlowStage(t *testing.T) { Warehouse: "fake-warehouse", }, }, + Status: kargoapi.StageStatus{ + Phase: kargoapi.StagePhaseNotApplicable, + }, }, reconciler: &reconciler{ listFreightFn: func( @@ -176,6 +195,7 @@ func TestSyncControlFlowStage(t *testing.T) { }, }, Status: kargoapi.StageStatus{ + Phase: kargoapi.StagePhaseNotApplicable, CurrentFreight: &kargoapi.SimpleFreight{}, Health: &kargoapi.Health{}, }, @@ -288,6 +308,87 @@ func TestSyncNormalStage(t *testing.T) { }, }, + { + name: "error starting verification", + stage: &kargoapi.Stage{ + Spec: &kargoapi.StageSpec{ + PromotionMechanisms: &kargoapi.PromotionMechanisms{}, + Verification: &kargoapi.Verification{}, + }, + Status: kargoapi.StageStatus{ + Phase: kargoapi.StagePhaseVerifying, + CurrentFreight: &kargoapi.SimpleFreight{}, + }, + }, + reconciler: &reconciler{ + hasNonTerminalPromotionsFn: noNonTerminalPromotionsFn, + checkHealthFn: func( + context.Context, + kargoapi.SimpleFreight, + []kargoapi.ArgoCDAppUpdate, + ) *kargoapi.Health { + return nil + }, + startVerificationFn: func( + context.Context, + *kargoapi.Stage, + ) (*kargoapi.VerificationInfo, error) { + return nil, errors.New("something went wrong") + }, + }, + assertions: func( + initialStatus 
kargoapi.StageStatus, + newStatus kargoapi.StageStatus, + err error, + ) { + require.Error(t, err) + require.Contains(t, err.Error(), "something went wrong") + require.Contains(t, err.Error(), "error starting verification process") + // Status should be returned unchanged + require.Equal(t, initialStatus, newStatus) + }, + }, + + { + name: "error checking verification result", + stage: &kargoapi.Stage{ + Spec: &kargoapi.StageSpec{ + PromotionMechanisms: &kargoapi.PromotionMechanisms{}, + Verification: &kargoapi.Verification{}, + }, + Status: kargoapi.StageStatus{ + Phase: kargoapi.StagePhaseVerifying, + CurrentFreight: &kargoapi.SimpleFreight{ + VerificationInfo: &kargoapi.VerificationInfo{}, + }, + }, + }, + reconciler: &reconciler{ + hasNonTerminalPromotionsFn: noNonTerminalPromotionsFn, + checkHealthFn: func( + context.Context, + kargoapi.SimpleFreight, + []kargoapi.ArgoCDAppUpdate, + ) *kargoapi.Health { + return nil + }, + getVerificationInfoFn: func(ctx context.Context, s *kargoapi.Stage) (*kargoapi.VerificationInfo, error) { + return nil, errors.New("something went wrong") + }, + }, + assertions: func( + initialStatus kargoapi.StageStatus, + newStatus kargoapi.StageStatus, + err error, + ) { + require.Error(t, err) + require.Contains(t, err.Error(), "something went wrong") + require.Contains(t, err.Error(), "error getting verification result") + // Status should be returned unchanged + require.Equal(t, initialStatus, newStatus) + }, + }, + { name: "error marking Freight as verified in Stage", stage: &kargoapi.Stage{ @@ -295,6 +396,7 @@ func TestSyncNormalStage(t *testing.T) { PromotionMechanisms: &kargoapi.PromotionMechanisms{}, }, Status: kargoapi.StageStatus{ + Phase: kargoapi.StagePhaseVerifying, CurrentFreight: &kargoapi.SimpleFreight{}, }, }, @@ -592,6 +694,9 @@ func TestSyncNormalStage(t *testing.T) { }, PromotionMechanisms: &kargoapi.PromotionMechanisms{}, }, + Status: kargoapi.StageStatus{ + CurrentFreight: &kargoapi.SimpleFreight{}, + }, }, 
reconciler: &reconciler{ hasNonTerminalPromotionsFn: noNonTerminalPromotionsFn, @@ -654,6 +759,9 @@ func TestSyncNormalStage(t *testing.T) { }, PromotionMechanisms: &kargoapi.PromotionMechanisms{}, }, + Status: kargoapi.StageStatus{ + CurrentFreight: &kargoapi.SimpleFreight{}, + }, }, reconciler: &reconciler{ hasNonTerminalPromotionsFn: noNonTerminalPromotionsFn, @@ -726,10 +834,14 @@ func TestSyncNormalStage(t *testing.T) { Warehouse: "fake-warehouse", }, PromotionMechanisms: &kargoapi.PromotionMechanisms{}, + Verification: &kargoapi.Verification{}, }, Status: kargoapi.StageStatus{ + Phase: kargoapi.StagePhaseVerifying, CurrentPromotion: &kargoapi.PromotionInfo{}, - CurrentFreight: &kargoapi.SimpleFreight{}, + CurrentFreight: &kargoapi.SimpleFreight{ + VerificationInfo: &kargoapi.VerificationInfo{}, + }, }, }, reconciler: &reconciler{ @@ -743,6 +855,18 @@ func TestSyncNormalStage(t *testing.T) { Status: kargoapi.HealthStateHealthy, } }, + getVerificationInfoFn: func( + context.Context, + *kargoapi.Stage, + ) (*kargoapi.VerificationInfo, error) { + return &kargoapi.VerificationInfo{ + AnalysisRun: kargoapi.AnalysisRunReference{ + Name: "fake-analysis-run", + Namespace: "fake-namespace", + Phase: string(rollouts.AnalysisPhaseSuccessful), + }, + }, nil + }, verifyFreightInStageFn: func(context.Context, string, string, string) error { return nil }, @@ -786,8 +910,21 @@ func TestSyncNormalStage(t *testing.T) { ) { require.NoError(t, err) require.Equal(t, int64(42), newStatus.ObservedGeneration) // Set - require.NotNil(t, newStatus.Health) // Set - require.Nil(t, newStatus.CurrentPromotion) // Cleared + require.Equal(t, kargoapi.StagePhaseSteady, newStatus.Phase) + require.NotNil(t, newStatus.Health) // Set + require.Nil(t, newStatus.CurrentPromotion) // Cleared + require.Equal(t, kargoapi.StagePhaseSteady, newStatus.Phase) + require.Equal( + t, + &kargoapi.VerificationInfo{ + AnalysisRun: kargoapi.AnalysisRunReference{ + Name: "fake-analysis-run", + Namespace: 
"fake-namespace", + Phase: string(rollouts.AnalysisPhaseSuccessful), + }, + }, + newStatus.CurrentFreight.VerificationInfo, + ) }, }, } diff --git a/internal/controller/stages/verification.go b/internal/controller/stages/verification.go new file mode 100644 index 000000000..e66e0d766 --- /dev/null +++ b/internal/controller/stages/verification.go @@ -0,0 +1,433 @@ +package stages + +import ( + "context" + "fmt" + "strings" + + "github.com/oklog/ulid/v2" + "github.com/pkg/errors" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + "k8s.io/apimachinery/pkg/labels" + "k8s.io/apimachinery/pkg/types" + "sigs.k8s.io/controller-runtime/pkg/client" + + kargoapi "github.com/akuity/kargo/api/v1alpha1" + rollouts "github.com/akuity/kargo/internal/controller/rollouts/api/v1alpha1" + "github.com/akuity/kargo/internal/logging" +) + +func (r *reconciler) startVerification( + ctx context.Context, + stage *kargoapi.Stage, +) (*kargoapi.VerificationInfo, error) { + logger := logging.LoggerFromContext(ctx) + + namespace := r.getAnalysisRunNamespace(stage) + + // Check for existing AnalysisRun + analysisRuns := rollouts.AnalysisRunList{} + if err := r.listAnalysisRunsFn( + ctx, + &analysisRuns, + &client.ListOptions{ + Namespace: namespace, + LabelSelector: labels.SelectorFromSet( + map[string]string{ + kargoapi.StageLabelKey: stage.Name, + kargoapi.FreightLabelKey: stage.Status.CurrentFreight.ID, + }, + ), + }, + ); err != nil { + return nil, errors.Wrapf( + err, + "error listing AnalysisRuns for Stage %q and Freight %q in namespace %q", + stage.Name, + stage.Status.CurrentFreight.ID, + namespace, + ) + } + if len(analysisRuns.Items) > 0 { + logger.Debug("AnalysisRun already exists for Freight") + return &kargoapi.VerificationInfo{ + AnalysisRun: kargoapi.AnalysisRunReference{ + Name: analysisRuns.Items[0].Name, + Namespace: analysisRuns.Items[0].Namespace, + Phase: string(analysisRuns.Items[0].Status.Phase), + }, + }, nil + } + + ver := stage.Spec.Verification + + templates := 
make([]*rollouts.AnalysisTemplate, len(ver.AnalysisTemplates)) + for i, templateRef := range ver.AnalysisTemplates { + template, err := r.getAnalysisTemplateFn( + ctx, + r.kargoClient, + types.NamespacedName{ + Namespace: stage.Namespace, + Name: templateRef.Name, + }, + ) + if err != nil { + return nil, errors.Wrapf( + err, + "error getting AnalysisTemplate %q in namespace %q", + templateRef.Name, + stage.Namespace, + ) + } + if template == nil { + return nil, errors.Errorf( + "AnalysisTemplate %q in namespace %q not found", + templateRef.Name, + stage.Namespace, + ) + } + templates[i] = template + } + + run, err := r.buildAnalysisRunFn(stage, templates) + if err != nil { + return nil, errors.Wrapf( + err, + "error building AnalysisRun for Stage %q and Freight %q in namespace %q", + stage.Name, + stage.Status.CurrentFreight.ID, + stage.Namespace, + ) + } + if err := r.createAnalysisRunFn(ctx, run); err != nil { + return nil, errors.Wrapf( + err, + "error creating AnalysisRun %q in namespace %q", + run.Name, + run.Namespace, + ) + } + + return &kargoapi.VerificationInfo{ + AnalysisRun: kargoapi.AnalysisRunReference{ + Name: run.Name, + Namespace: run.Namespace, + }, + }, nil +} + +func (r *reconciler) getVerificationInfo( + ctx context.Context, + stage *kargoapi.Stage, +) (*kargoapi.VerificationInfo, error) { + namespace := r.getAnalysisRunNamespace(stage) + analysisRunName := stage.Status.CurrentFreight.VerificationInfo.AnalysisRun.Name + analysisRun, err := r.getAnalysisRunFn( + ctx, + r.rolloutsClient, + types.NamespacedName{ + Namespace: namespace, + Name: analysisRunName, + }, + ) + if err != nil { + return nil, errors.Wrapf( + err, + "error getting AnalysisRun %q in namespace %q", + analysisRunName, + namespace, + ) + } + if analysisRun == nil { + return nil, errors.Errorf( + "AnalysisRun %q in namespace %q not found", + analysisRunName, + namespace, + ) + } + return &kargoapi.VerificationInfo{ + AnalysisRun: kargoapi.AnalysisRunReference{ + Name: 
analysisRun.Name, + Namespace: analysisRun.Namespace, + Phase: string(analysisRun.Status.Phase), + }, + }, nil +} + +// getAnalysisRunNamespace infers whether this controller is running in a shard. +// If it is not running in a shard, it returns the namespace of the provided Stage +// as the appropriate namespace for an AnalysisRun. If it is running in a shard, +// it returns the hard-coded namespace "analysis-runs" since Project namespaces +// are not available on the shards. +func (r *reconciler) getAnalysisRunNamespace(stage *kargoapi.Stage) string { + if r.shardName == "" { + return stage.Namespace + } + return "analysis-runs" // TODO: KR: Do not hardcode this +} + +func (r *reconciler) buildAnalysisRun( + stage *kargoapi.Stage, + templates []*rollouts.AnalysisTemplate, +) (*rollouts.AnalysisRun, error) { + // maximum length of the stage name used in the AnalysisRun name prefix before it exceeds + // kubernetes resource name limit of 253 + // 253 - 1 (.) - 26 (ulid) - 1 (.) - 7 (sha) = 218 + const maxStageNamePrefixLength = 218 + + // Build the name of the AnalysisRun + shortHash := stage.Status.CurrentFreight.ID + if len(shortHash) > 7 { + shortHash = shortHash[0:7] + } + shortStageName := stage.Name + if len(stage.Name) > maxStageNamePrefixLength { + shortStageName = shortStageName[0:maxStageNamePrefixLength] + } + analysisRunName := strings.ToLower(fmt.Sprintf("%s.%s.%s", shortStageName, ulid.Make(), shortHash)) + + // Build the labels and annotations for the AnalysisRun + var numLabels int + var numAnnotations int + if stage.Spec.Verification.AnalysisRunMetadata != nil { + numLabels = len(stage.Spec.Verification.AnalysisRunMetadata.Labels) + numAnnotations = len(stage.Spec.Verification.AnalysisRunMetadata.Annotations) + } + // Kargo will add two labels of its own, so size the map accordingly + lbls := make(map[string]string, numLabels+2) + annotations := make(map[string]string, numAnnotations) + if stage.Spec.Verification.AnalysisRunMetadata != nil { + for 
k, v := range stage.Spec.Verification.AnalysisRunMetadata.Labels { + lbls[k] = v + } + for k, v := range stage.Spec.Verification.AnalysisRunMetadata.Annotations { + annotations[k] = v + } + } + lbls[kargoapi.StageLabelKey] = stage.Name + lbls[kargoapi.FreightLabelKey] = stage.Status.CurrentFreight.ID + + // Flatten templates into a single template + template, err := flattenTemplates(templates) + if err != nil { + return nil, errors.Wrap(err, "error flattening templates") + } + + // Merge the args from the template with the args from the Stage + rolloutsArgs := make([]rollouts.Argument, len(stage.Spec.Verification.Args)) + for i, argument := range stage.Spec.Verification.Args { + arg := argument // Avoid implicit memory aliasing + rolloutsArgs[i] = rollouts.Argument{ + Name: arg.Name, + Value: &arg.Value, + } + } + mergedArgs, err := mergeArgs(rolloutsArgs, template.Spec.Args) + if err != nil { + return nil, errors.Wrap(err, "error merging arguments") + } + + return &rollouts.AnalysisRun{ + ObjectMeta: metav1.ObjectMeta{ + Name: analysisRunName, + Namespace: r.getAnalysisRunNamespace(stage), + Labels: lbls, + Annotations: annotations, + }, + Spec: rollouts.AnalysisRunSpec{ + Metrics: template.Spec.Metrics, + DryRun: template.Spec.DryRun, + MeasurementRetention: template.Spec.MeasurementRetention, + Args: mergedArgs, + }, + }, nil +} + +func flattenTemplates( + templates []*rollouts.AnalysisTemplate, +) (*rollouts.AnalysisTemplate, error) { + metrics, err := flattenMetrics(templates) + if err != nil { + return nil, err + } + dryRunMetrics, err := flattenDryRunMetrics(templates) + if err != nil { + return nil, err + } + measurementRetentionMetrics, err := + flattenMeasurementRetentionMetrics(templates) + if err != nil { + return nil, err + } + args, err := flattenArgs(templates) + if err != nil { + return nil, err + } + return &rollouts.AnalysisTemplate{ + Spec: rollouts.AnalysisTemplateSpec{ + Metrics: metrics, + DryRun: dryRunMetrics, + MeasurementRetention: 
measurementRetentionMetrics, + Args: args, + }, + }, nil +} + +func flattenMetrics( + templates []*rollouts.AnalysisTemplate, +) ([]rollouts.Metric, error) { + var combinedMetrics []rollouts.Metric + for _, template := range templates { + combinedMetrics = append(combinedMetrics, template.Spec.Metrics...) + } + metricMap := map[string]bool{} + for _, metric := range combinedMetrics { + if _, ok := metricMap[metric.Name]; ok { + return nil, fmt.Errorf("two metrics have the same name '%s'", metric.Name) + } + metricMap[metric.Name] = true + } + return combinedMetrics, nil +} + +func flattenDryRunMetrics( + templates []*rollouts.AnalysisTemplate, +) ([]rollouts.DryRun, error) { + var combinedDryRunMetrics []rollouts.DryRun + for _, template := range templates { + combinedDryRunMetrics = append(combinedDryRunMetrics, template.Spec.DryRun...) + } + err := validateDryRunMetrics(combinedDryRunMetrics) + if err != nil { + return nil, err + } + return combinedDryRunMetrics, nil +} + +func flattenMeasurementRetentionMetrics( + templates []*rollouts.AnalysisTemplate, +) ([]rollouts.MeasurementRetention, error) { + var combinedMeasurementRetentionMetrics []rollouts.MeasurementRetention + for _, template := range templates { + combinedMeasurementRetentionMetrics = + append(combinedMeasurementRetentionMetrics, template.Spec.MeasurementRetention...) + } + err := validateMeasurementRetentionMetrics(combinedMeasurementRetentionMetrics) + if err != nil { + return nil, err + } + return combinedMeasurementRetentionMetrics, nil +} + +func flattenArgs( + templates []*rollouts.AnalysisTemplate, +) ([]rollouts.Argument, error) { + var combinedArgs []rollouts.Argument + appendOrUpdate := func(newArg rollouts.Argument) error { + for i, prevArg := range combinedArgs { + if prevArg.Name == newArg.Name { + // found two args with same name. 
verify they have the same value, + // otherwise update the combined args with the new non-nil value + if prevArg.Value != nil && + newArg.Value != nil && + *prevArg.Value != *newArg.Value { + return fmt.Errorf( + "Argument `%s` specified multiple times with different "+ + "values: '%s', '%s'", + prevArg.Name, + *prevArg.Value, + *newArg.Value, + ) + } + // If previous arg value is already set (not nil), it should not be + // replaced by a new arg with a nil value + if prevArg.Value == nil { + combinedArgs[i] = newArg + } + return nil + } + } + combinedArgs = append(combinedArgs, newArg) + return nil + } + for _, template := range templates { + for _, arg := range template.Spec.Args { + if err := appendOrUpdate(arg); err != nil { + return nil, err + } + } + } + return combinedArgs, nil +} + +func validateDryRunMetrics(dryRunMetrics []rollouts.DryRun) error { + metricMap := map[string]bool{} + for _, dryRun := range dryRunMetrics { + if _, ok := metricMap[dryRun.MetricName]; ok { + return fmt.Errorf( + "two Dry-Run metric rules have the same name '%s'", + dryRun.MetricName, + ) + } + metricMap[dryRun.MetricName] = true + } + return nil +} + +func validateMeasurementRetentionMetrics( + measurementRetentionMetrics []rollouts.MeasurementRetention, +) error { + metricMap := map[string]bool{} + for _, measurementRetention := range measurementRetentionMetrics { + if _, ok := metricMap[measurementRetention.MetricName]; ok { + return fmt.Errorf( + "two Measurement Retention metric rules have the same name '%s'", + measurementRetention.MetricName, + ) + } + metricMap[measurementRetention.MetricName] = true + } + return nil +} + +// MergeArgs merges two lists of arguments, the incoming and the templates. If +// there are any unresolved arguments that have no value, raises an error. +func mergeArgs( + incomingArgs []rollouts.Argument, + templateArgs []rollouts.Argument, +) ([]rollouts.Argument, error) { + newArgs := append(templateArgs[:0:0], templateArgs...) 
+ for _, arg := range incomingArgs { + i := findArg(arg.Name, newArgs) + if i >= 0 { + if arg.Value != nil { + newArgs[i].Value = arg.Value + } else if arg.ValueFrom != nil { + newArgs[i].ValueFrom = arg.ValueFrom + } + } + } + err := resolveArgs(newArgs) + if err != nil { + return nil, err + } + return newArgs, nil +} + +func findArg(name string, args []rollouts.Argument) int { + for i, arg := range args { + if arg.Name == name { + return i + } + } + return -1 +} + +func resolveArgs(args []rollouts.Argument) error { + for _, arg := range args { + if arg.Value == nil && arg.ValueFrom == nil { + return fmt.Errorf("args.%s was not resolved", arg.Name) + } + } + return nil +} diff --git a/internal/controller/stages/verification_test.go b/internal/controller/stages/verification_test.go new file mode 100644 index 000000000..949dd0fd0 --- /dev/null +++ b/internal/controller/stages/verification_test.go @@ -0,0 +1,777 @@ +package stages + +import ( + "context" + "fmt" + "testing" + + "github.com/pkg/errors" + "github.com/stretchr/testify/require" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + "k8s.io/apimachinery/pkg/types" + "k8s.io/utils/pointer" + "sigs.k8s.io/controller-runtime/pkg/client" + + kargoapi "github.com/akuity/kargo/api/v1alpha1" + rollouts "github.com/akuity/kargo/internal/controller/rollouts/api/v1alpha1" +) + +func TestStartVerification(t *testing.T) { + testCases := []struct { + name string + stage *kargoapi.Stage + reconciler *reconciler + assertions func(*kargoapi.VerificationInfo, error) + }{ + { + name: "error listing AnalysisRuns", + stage: &kargoapi.Stage{ + Status: kargoapi.StageStatus{ + CurrentFreight: &kargoapi.SimpleFreight{ + ID: "fake-id", + }, + }, + }, + reconciler: &reconciler{ + listAnalysisRunsFn: func( + context.Context, + client.ObjectList, + ...client.ListOption, + ) error { + return errors.New("something went wrong") + }, + }, + assertions: func(vi *kargoapi.VerificationInfo, err error) { + require.Error(t, err) + 
require.Contains(t, err.Error(), "something went wrong") + require.Contains(t, err.Error(), "error listing AnalysisRuns for Stage") + }, + }, + { + name: "Analysis run already exists", + stage: &kargoapi.Stage{ + Status: kargoapi.StageStatus{ + CurrentFreight: &kargoapi.SimpleFreight{ + ID: "fake-id", + }, + }, + }, + reconciler: &reconciler{ + listAnalysisRunsFn: func( + _ context.Context, + objList client.ObjectList, + _ ...client.ListOption, + ) error { + analysisRuns, ok := objList.(*rollouts.AnalysisRunList) + require.True(t, ok) + analysisRuns.Items = []rollouts.AnalysisRun{{}} + return nil + }, + }, + assertions: func(_ *kargoapi.VerificationInfo, err error) { + require.NoError(t, err) + }, + }, + { + name: "error getting AnalysisTemplate", + stage: &kargoapi.Stage{ + Spec: &kargoapi.StageSpec{ + Verification: &kargoapi.Verification{ + AnalysisTemplates: []kargoapi.AnalysisTemplateReference{{}}, + }, + }, + Status: kargoapi.StageStatus{ + CurrentFreight: &kargoapi.SimpleFreight{ + ID: "fake-id", + }, + }, + }, + reconciler: &reconciler{ + listAnalysisRunsFn: func( + context.Context, + client.ObjectList, + ...client.ListOption, + ) error { + return nil + }, + getAnalysisTemplateFn: func( + context.Context, + client.Client, + types.NamespacedName, + ) (*rollouts.AnalysisTemplate, error) { + return nil, errors.New("something went wrong") + }, + }, + assertions: func(_ *kargoapi.VerificationInfo, err error) { + require.Error(t, err) + require.Contains(t, err.Error(), "something went wrong") + require.Contains(t, err.Error(), "error getting AnalysisTemplate") + }, + }, + { + name: "AnalysisTemplate not found", + stage: &kargoapi.Stage{ + Spec: &kargoapi.StageSpec{ + Verification: &kargoapi.Verification{ + AnalysisTemplates: []kargoapi.AnalysisTemplateReference{{}}, + }, + }, + Status: kargoapi.StageStatus{ + CurrentFreight: &kargoapi.SimpleFreight{ + ID: "fake-id", + }, + }, + }, + reconciler: &reconciler{ + listAnalysisRunsFn: func( + context.Context, + 
client.ObjectList, + ...client.ListOption, + ) error { + return nil + }, + getAnalysisTemplateFn: func( + context.Context, + client.Client, + types.NamespacedName, + ) (*rollouts.AnalysisTemplate, error) { + return nil, nil + }, + }, + assertions: func(_ *kargoapi.VerificationInfo, err error) { + require.Error(t, err) + require.Contains(t, err.Error(), "AnalysisTemplate") + require.Contains(t, err.Error(), "not found") + }, + }, + { + name: "error building AnalysisRun", + stage: &kargoapi.Stage{ + Spec: &kargoapi.StageSpec{ + Verification: &kargoapi.Verification{ + AnalysisTemplates: []kargoapi.AnalysisTemplateReference{{}}, + }, + }, + Status: kargoapi.StageStatus{ + CurrentFreight: &kargoapi.SimpleFreight{ + ID: "fake-id", + }, + }, + }, + reconciler: &reconciler{ + listAnalysisRunsFn: func( + context.Context, + client.ObjectList, + ...client.ListOption, + ) error { + return nil + }, + getAnalysisTemplateFn: func( + context.Context, + client.Client, + types.NamespacedName, + ) (*rollouts.AnalysisTemplate, error) { + return &rollouts.AnalysisTemplate{}, nil + }, + buildAnalysisRunFn: func( + *kargoapi.Stage, + []*rollouts.AnalysisTemplate, + ) (*rollouts.AnalysisRun, error) { + return nil, errors.New("something went wrong") + }, + }, + assertions: func(_ *kargoapi.VerificationInfo, err error) { + require.Error(t, err) + require.Contains(t, err.Error(), "something went wrong") + require.Contains(t, err.Error(), "error building AnalysisRun for Stage") + }, + }, + { + name: "error creating AnalysisRun", + stage: &kargoapi.Stage{ + Spec: &kargoapi.StageSpec{ + Verification: &kargoapi.Verification{ + AnalysisTemplates: []kargoapi.AnalysisTemplateReference{{}}, + }, + }, + Status: kargoapi.StageStatus{ + CurrentFreight: &kargoapi.SimpleFreight{ + ID: "fake-id", + }, + }, + }, + reconciler: &reconciler{ + listAnalysisRunsFn: func( + context.Context, + client.ObjectList, + ...client.ListOption, + ) error { + return nil + }, + getAnalysisTemplateFn: func( + 
context.Context, + client.Client, + types.NamespacedName, + ) (*rollouts.AnalysisTemplate, error) { + return &rollouts.AnalysisTemplate{}, nil + }, + buildAnalysisRunFn: func( + *kargoapi.Stage, + []*rollouts.AnalysisTemplate, + ) (*rollouts.AnalysisRun, error) { + return &rollouts.AnalysisRun{}, nil + }, + createAnalysisRunFn: func( + context.Context, + client.Object, + ...client.CreateOption, + ) error { + return errors.New("something went wrong") + }, + }, + assertions: func(_ *kargoapi.VerificationInfo, err error) { + require.Error(t, err) + require.Contains(t, err.Error(), "something went wrong") + require.Contains(t, err.Error(), "error creating AnalysisRun") + }, + }, + { + name: "success", + stage: &kargoapi.Stage{ + Spec: &kargoapi.StageSpec{ + Verification: &kargoapi.Verification{ + AnalysisTemplates: []kargoapi.AnalysisTemplateReference{{}}, + }, + }, + Status: kargoapi.StageStatus{ + CurrentFreight: &kargoapi.SimpleFreight{ + ID: "fake-id", + }, + }, + }, + reconciler: &reconciler{ + listAnalysisRunsFn: func( + context.Context, + client.ObjectList, + ...client.ListOption, + ) error { + return nil + }, + getAnalysisTemplateFn: func( + context.Context, + client.Client, + types.NamespacedName, + ) (*rollouts.AnalysisTemplate, error) { + return &rollouts.AnalysisTemplate{}, nil + }, + buildAnalysisRunFn: func( + *kargoapi.Stage, + []*rollouts.AnalysisTemplate, + ) (*rollouts.AnalysisRun, error) { + return &rollouts.AnalysisRun{ + ObjectMeta: metav1.ObjectMeta{ + Name: "fake-run", + Namespace: "fake-namespace", + }, + }, nil + }, + createAnalysisRunFn: func( + context.Context, + client.Object, + ...client.CreateOption, + ) error { + return nil + }, + }, + assertions: func(ver *kargoapi.VerificationInfo, err error) { + require.NoError(t, err) + require.Equal( + t, + &kargoapi.VerificationInfo{ + AnalysisRun: kargoapi.AnalysisRunReference{ + Name: "fake-run", + Namespace: "fake-namespace", + }, + }, + ver, + ) + }, + }, + } + for _, testCase := range testCases 
{ + t.Run(testCase.name, func(t *testing.T) { + testCase.assertions( + testCase.reconciler.startVerification( + context.Background(), + testCase.stage, + ), + ) + }) + } +} + +func TestGetVerificationInfo(t *testing.T) { + testCases := []struct { + name string + stage *kargoapi.Stage + reconciler *reconciler + assertions func(*kargoapi.VerificationInfo, error) + }{ + { + name: "error getting AnalysisRun", + stage: &kargoapi.Stage{ + Status: kargoapi.StageStatus{ + CurrentFreight: &kargoapi.SimpleFreight{ + VerificationInfo: &kargoapi.VerificationInfo{ + AnalysisRun: kargoapi.AnalysisRunReference{ + Name: "fake-run", + Namespace: "fake-namespace", + }, + }, + }, + }, + }, + reconciler: &reconciler{ + getAnalysisRunFn: func( + context.Context, + client.Client, + types.NamespacedName, + ) (*rollouts.AnalysisRun, error) { + return nil, errors.New("something went wrong") + }, + }, + assertions: func(vi *kargoapi.VerificationInfo, err error) { + require.Error(t, err) + require.Contains(t, err.Error(), "something went wrong") + require.Contains(t, err.Error(), "error getting AnalysisRun") + }, + }, + { + name: "AnalysisRun not found", + stage: &kargoapi.Stage{ + Status: kargoapi.StageStatus{ + CurrentFreight: &kargoapi.SimpleFreight{ + VerificationInfo: &kargoapi.VerificationInfo{ + AnalysisRun: kargoapi.AnalysisRunReference{ + Name: "fake-run", + Namespace: "fake-namespace", + }, + }, + }, + }, + }, + reconciler: &reconciler{ + getAnalysisRunFn: func( + context.Context, + client.Client, + types.NamespacedName, + ) (*rollouts.AnalysisRun, error) { + return nil, nil + }, + }, + assertions: func(vi *kargoapi.VerificationInfo, err error) { + require.Error(t, err) + require.Contains(t, err.Error(), "AnalysisRun") + require.Contains(t, err.Error(), "not found") + }, + }, + { + name: "success", + stage: &kargoapi.Stage{ + Status: kargoapi.StageStatus{ + CurrentFreight: &kargoapi.SimpleFreight{ + VerificationInfo: &kargoapi.VerificationInfo{ + AnalysisRun: 
kargoapi.AnalysisRunReference{ + Name: "fake-run", + Namespace: "fake-namespace", + }, + }, + }, + }, + }, + reconciler: &reconciler{ + getAnalysisRunFn: func( + context.Context, + client.Client, + types.NamespacedName, + ) (*rollouts.AnalysisRun, error) { + return &rollouts.AnalysisRun{ + ObjectMeta: metav1.ObjectMeta{ + Name: "fake-run", + Namespace: "fake-namespace", + }, + Status: rollouts.AnalysisRunStatus{ + Phase: rollouts.AnalysisPhaseSuccessful, + }, + }, nil + }, + }, + assertions: func(ver *kargoapi.VerificationInfo, err error) { + require.NoError(t, err) + require.Equal( + t, + &kargoapi.VerificationInfo{ + AnalysisRun: kargoapi.AnalysisRunReference{ + Name: "fake-run", + Namespace: "fake-namespace", + Phase: string(rollouts.AnalysisPhaseSuccessful), + }, + }, + ver, + ) + }, + }, + } + for _, testCase := range testCases { + t.Run(testCase.name, func(t *testing.T) { + testCase.assertions( + testCase.reconciler.getVerificationInfo( + context.Background(), + testCase.stage, + ), + ) + }) + } +} + +func TestFlattenTemplates(t *testing.T) { + metric := func(name, successCondition string) rollouts.Metric { + return rollouts.Metric{ + Name: name, + SuccessCondition: successCondition, + } + } + arg := func(name string, value *string) rollouts.Argument { + return rollouts.Argument{ + Name: name, + Value: value, + } + } + t.Run("Handle empty list", func(t *testing.T) { + template, err := flattenTemplates([]*rollouts.AnalysisTemplate{}) + require.Nil(t, err) + require.Len(t, template.Spec.Metrics, 0) + require.Len(t, template.Spec.Args, 0) + + }) + t.Run("No changes on single template", func(t *testing.T) { + orig := &rollouts.AnalysisTemplate{ + Spec: rollouts.AnalysisTemplateSpec{ + Metrics: []rollouts.Metric{metric("foo", "{{args.test}}")}, + Args: []rollouts.Argument{arg("test", pointer.String("true"))}, + }, + } + template, err := flattenTemplates([]*rollouts.AnalysisTemplate{orig}) + require.Nil(t, err) + require.Equal(t, orig.Spec, template.Spec) + }) + 
t.Run("Merge multiple metrics successfully", func(t *testing.T) { + fooMetric := metric("foo", "true") + barMetric := metric("bar", "true") + template, err := flattenTemplates([]*rollouts.AnalysisTemplate{ + { + Spec: rollouts.AnalysisTemplateSpec{ + Metrics: []rollouts.Metric{fooMetric}, + DryRun: []rollouts.DryRun{{ + MetricName: "foo", + }}, + MeasurementRetention: []rollouts.MeasurementRetention{{ + MetricName: "foo", + }}, + Args: nil, + }, + }, { + Spec: rollouts.AnalysisTemplateSpec{ + Metrics: []rollouts.Metric{barMetric}, + DryRun: []rollouts.DryRun{{ + MetricName: "bar", + }}, + MeasurementRetention: []rollouts.MeasurementRetention{{ + MetricName: "bar", + }}, + Args: nil, + }, + }, + }) + require.Nil(t, err) + require.Nil(t, template.Spec.Args) + require.Len(t, template.Spec.Metrics, 2) + require.Equal(t, fooMetric, template.Spec.Metrics[0]) + require.Equal(t, barMetric, template.Spec.Metrics[1]) + }) + t.Run("Merge analysis templates successfully", func(t *testing.T) { + fooMetric := metric("foo", "true") + barMetric := metric("bar", "true") + template, err := flattenTemplates([]*rollouts.AnalysisTemplate{ + { + Spec: rollouts.AnalysisTemplateSpec{ + Metrics: []rollouts.Metric{fooMetric}, + DryRun: []rollouts.DryRun{ + { + MetricName: "foo", + }, + }, + MeasurementRetention: []rollouts.MeasurementRetention{ + { + MetricName: "foo", + }, + }, + Args: nil, + }, + }, + { + Spec: rollouts.AnalysisTemplateSpec{ + Metrics: []rollouts.Metric{barMetric}, + DryRun: []rollouts.DryRun{ + { + MetricName: "bar", + }, + }, + MeasurementRetention: []rollouts.MeasurementRetention{ + { + MetricName: "bar", + }, + }, + Args: nil, + }, + }, + }) + require.Nil(t, err) + require.Nil(t, template.Spec.Args) + require.Len(t, template.Spec.Metrics, 2) + require.Equal(t, fooMetric, template.Spec.Metrics[0]) + require.Equal(t, barMetric, template.Spec.Metrics[1]) + }) + t.Run("Merge fail with name collision", func(t *testing.T) { + fooMetric := metric("foo", "true") + template, 
err := flattenTemplates([]*rollouts.AnalysisTemplate{ + { + Spec: rollouts.AnalysisTemplateSpec{ + Metrics: []rollouts.Metric{fooMetric}, + Args: nil, + }, + }, { + Spec: rollouts.AnalysisTemplateSpec{ + Metrics: []rollouts.Metric{fooMetric}, + Args: nil, + }, + }, + }) + require.Nil(t, template) + require.Equal(t, err, fmt.Errorf("two metrics have the same name 'foo'")) + }) + t.Run("Merge fail with dry-run name collision", func(t *testing.T) { + fooMetric := metric("foo", "true") + barMetric := metric("bar", "true") + template, err := flattenTemplates([]*rollouts.AnalysisTemplate{ + { + Spec: rollouts.AnalysisTemplateSpec{ + Metrics: []rollouts.Metric{fooMetric}, + DryRun: []rollouts.DryRun{ + { + MetricName: "foo", + }, + }, + Args: nil, + }, + }, { + Spec: rollouts.AnalysisTemplateSpec{ + Metrics: []rollouts.Metric{barMetric}, + DryRun: []rollouts.DryRun{ + { + MetricName: "foo", + }, + }, + Args: nil, + }, + }, + }) + require.Nil(t, template) + require.Equal(t, err, fmt.Errorf("two Dry-Run metric rules have the same name 'foo'")) + }) + t.Run("Merge fail with measurement retention metrics name collision", func(t *testing.T) { + fooMetric := metric("foo", "true") + barMetric := metric("bar", "true") + template, err := flattenTemplates([]*rollouts.AnalysisTemplate{ + { + Spec: rollouts.AnalysisTemplateSpec{ + Metrics: []rollouts.Metric{fooMetric}, + MeasurementRetention: []rollouts.MeasurementRetention{ + { + MetricName: "foo", + }, + }, + Args: nil, + }, + }, { + Spec: rollouts.AnalysisTemplateSpec{ + Metrics: []rollouts.Metric{barMetric}, + MeasurementRetention: []rollouts.MeasurementRetention{ + { + MetricName: "foo", + }, + }, + Args: nil, + }, + }, + }) + require.Nil(t, template) + require.Equal(t, err, fmt.Errorf("two Measurement Retention metric rules have the same name 'foo'")) + }) + t.Run("Merge multiple args successfully", func(t *testing.T) { + fooArgs := arg("foo", pointer.String("true")) + barArgs := arg("bar", pointer.String("true")) + template, 
err := flattenTemplates([]*rollouts.AnalysisTemplate{ + { + Spec: rollouts.AnalysisTemplateSpec{ + Metrics: nil, + Args: []rollouts.Argument{fooArgs}, + }, + }, { + Spec: rollouts.AnalysisTemplateSpec{ + Metrics: nil, + Args: []rollouts.Argument{barArgs}, + }, + }, + }) + require.Nil(t, err) + require.Len(t, template.Spec.Args, 2) + require.Equal(t, fooArgs, template.Spec.Args[0]) + require.Equal(t, barArgs, template.Spec.Args[1]) + }) + t.Run(" Merge args with same name but only one has value", func(t *testing.T) { + fooArgsValue := arg("foo", pointer.String("true")) + fooArgsNoValue := arg("foo", nil) + template, err := flattenTemplates([]*rollouts.AnalysisTemplate{ + { + Spec: rollouts.AnalysisTemplateSpec{ + Metrics: nil, + Args: []rollouts.Argument{fooArgsValue}, + }, + }, { + Spec: rollouts.AnalysisTemplateSpec{ + Metrics: nil, + Args: []rollouts.Argument{fooArgsNoValue}, + }, + }, + }) + require.Nil(t, err) + require.Len(t, template.Spec.Args, 1) + require.Contains(t, template.Spec.Args, fooArgsValue) + }) + t.Run("Error: merge args with same name and both have values", func(t *testing.T) { + fooArgs := arg("foo", pointer.String("true")) + fooArgsWithDiffValue := arg("foo", pointer.String("false")) + template, err := flattenTemplates([]*rollouts.AnalysisTemplate{ + { + Spec: rollouts.AnalysisTemplateSpec{ + Metrics: nil, + Args: []rollouts.Argument{fooArgs}, + }, + }, { + Spec: rollouts.AnalysisTemplateSpec{ + Metrics: nil, + Args: []rollouts.Argument{fooArgsWithDiffValue}, + }, + }, + }) + require.Equal(t, fmt.Errorf("Argument `foo` specified multiple times with different values: 'true', 'false'"), err) + require.Nil(t, template) + }) +} + +func TestMergeArgs(t *testing.T) { + { + // nil list + args, err := mergeArgs(nil, nil) + require.NoError(t, err) + require.Nil(t, args) + } + { + // empty list + args, err := mergeArgs(nil, []rollouts.Argument{}) + require.NoError(t, err) + require.Equal(t, []rollouts.Argument{}, args) + } + { + // use defaults + args, 
err := mergeArgs( + nil, []rollouts.Argument{ + { + Name: "foo", + Value: pointer.String("bar"), + }, + { + Name: "my-secret", + ValueFrom: &rollouts.ValueFrom{ + SecretKeyRef: &rollouts.SecretKeyRef{ + Name: "name", + Key: "key", + }, + }, + }, + }) + require.NoError(t, err) + require.Len(t, args, 2) + require.Equal(t, "foo", args[0].Name) + require.Equal(t, "bar", *args[0].Value) + require.Equal(t, "my-secret", args[1].Name) + require.NotNil(t, args[1].ValueFrom) + } + { + // overwrite defaults + args, err := mergeArgs( + []rollouts.Argument{ + { + Name: "foo", + Value: pointer.String("overwrite"), + }, + }, []rollouts.Argument{ + { + Name: "foo", + Value: pointer.String("bar"), + }, + }) + require.NoError(t, err) + require.Len(t, args, 1) + require.Equal(t, "foo", args[0].Name) + require.Equal(t, "overwrite", *args[0].Value) + } + { + // not resolved + args, err := mergeArgs( + []rollouts.Argument{ + { + Name: "foo", + }, + }, []rollouts.Argument{ + { + Name: "foo", + }, + }) + require.EqualError(t, err, "args.foo was not resolved") + require.Nil(t, args) + } + { + // extra arg + args, err := mergeArgs( + []rollouts.Argument{ + { + Name: "foo", + Value: pointer.String("my-value"), + }, + { + Name: "extra-arg", + Value: pointer.String("extra-value"), + }, + }, []rollouts.Argument{ + { + Name: "foo", + }, + }) + require.NoError(t, err) + require.Len(t, args, 1) + require.Equal(t, "foo", args[0].Name) + require.Equal(t, "my-value", *args[0].Value) + } +} diff --git a/internal/kargo/kargo.go b/internal/kargo/kargo.go index 5e0a60da1..b2ac52b6b 100644 --- a/internal/kargo/kargo.go +++ b/internal/kargo/kargo.go @@ -11,7 +11,6 @@ import ( "sigs.k8s.io/controller-runtime/pkg/predicate" kargoapi "github.com/akuity/kargo/api/v1alpha1" - "github.com/akuity/kargo/internal/controller" ) const ( @@ -50,9 +49,9 @@ func NewPromotion(stage kargoapi.Stage, freight string) kargoapi.Promotion { Freight: freight, }, } - if stage.Labels != nil && 
stage.Labels[controller.ShardLabelKey] != "" { + if stage.Labels != nil && stage.Labels[kargoapi.ShardLabelKey] != "" { promotion.ObjectMeta.Labels = map[string]string{ - controller.ShardLabelKey: stage.Labels[controller.ShardLabelKey], + kargoapi.ShardLabelKey: stage.Labels[kargoapi.ShardLabelKey], } } return promotion diff --git a/internal/kargo/kargo_test.go b/internal/kargo/kargo_test.go index c707bfd97..543013f75 100644 --- a/internal/kargo/kargo_test.go +++ b/internal/kargo/kargo_test.go @@ -11,7 +11,6 @@ import ( "sigs.k8s.io/controller-runtime/pkg/event" kargoapi "github.com/akuity/kargo/api/v1alpha1" - "github.com/akuity/kargo/internal/controller" ) func TestNewPromotion(t *testing.T) { @@ -53,7 +52,7 @@ func TestNewPromotion(t *testing.T) { Name: "test", Namespace: "kargo-demo", Labels: map[string]string{ - controller.ShardLabelKey: "another-shard", + kargoapi.ShardLabelKey: "another-shard", }, }, }, @@ -62,7 +61,7 @@ func TestNewPromotion(t *testing.T) { parts := strings.Split(promo.Name, ".") require.Equal(t, "test", parts[0]) require.Equal(t, testFreight[0:7], parts[2]) - require.Equal(t, "another-shard", promo.Labels[controller.ShardLabelKey]) + require.Equal(t, "another-shard", promo.Labels[kargoapi.ShardLabelKey]) }, }, { diff --git a/internal/kubeclient/indexer.go b/internal/kubeclient/indexer.go index 89d00a8f6..297f671ae 100644 --- a/internal/kubeclient/indexer.go +++ b/internal/kubeclient/indexer.go @@ -10,7 +10,6 @@ import ( "sigs.k8s.io/controller-runtime/pkg/client" kargoapi "github.com/akuity/kargo/api/v1alpha1" - "github.com/akuity/kargo/internal/controller" ) const ( @@ -25,6 +24,7 @@ const ( NonTerminalPromotionsByStageIndexField = "stage" PromotionPoliciesByStageIndexField = "stage" + StagesByAnalysisRunIndexField = "analysisRun" StagesByArgoCDApplicationsIndexField = "applications" StagesByFreightIndexField = "freight" StagesByUpstreamStagesIndexField = "upstreamStages" @@ -34,6 +34,44 @@ const ( ServiceAccountsBySubjectIndexField = 
"subjects" ) +func IndexStagesByAnalysisRun(ctx context.Context, mgr ctrl.Manager, shardName string) error { + return mgr.GetFieldIndexer().IndexField( + ctx, + &kargoapi.Stage{}, + StagesByAnalysisRunIndexField, + indexStagesByAnalysisRun(shardName)) +} + +func indexStagesByAnalysisRun(shardName string) client.IndexerFunc { + return func(obj client.Object) []string { + // Return early if: + // + // 1. This is the default controller, but the object is labeled for a + // specific shard. + // + // 2. This is a shard-specific controller, but the object is not labeled for + // this shard. + objShardName, labeled := obj.GetLabels()[kargoapi.ShardLabelKey] + if (shardName == "" && labeled) || + (shardName != "" && shardName != objShardName) { + return nil + } + + stage := obj.(*kargoapi.Stage) // nolint: forcetypeassert + if stage.Status.CurrentFreight == nil || + stage.Status.CurrentFreight.VerificationInfo == nil { + return nil + } + return []string{ + fmt.Sprintf( + "%s:%s", + stage.Status.CurrentFreight.VerificationInfo.AnalysisRun.Namespace, + stage.Status.CurrentFreight.VerificationInfo.AnalysisRun.Name, + ), + } + } +} + func IndexStagesByArgoCDApplications(ctx context.Context, mgr ctrl.Manager, shardName string) error { return mgr.GetFieldIndexer().IndexField( ctx, @@ -51,7 +89,7 @@ func indexStagesByArgoCDApplications(shardName string) client.IndexerFunc { // // 2. This is a shard-specific controller, but the object is not labeled for // this shard. 
- objShardName, labeled := obj.GetLabels()[controller.ShardLabelKey] + objShardName, labeled := obj.GetLabels()[kargoapi.ShardLabelKey] if (shardName == "" && labeled) || (shardName != "" && shardName != objShardName) { return nil diff --git a/internal/kubeclient/indexer_test.go b/internal/kubeclient/indexer_test.go index 8acd1677d..eb506218d 100644 --- a/internal/kubeclient/indexer_test.go +++ b/internal/kubeclient/indexer_test.go @@ -7,9 +7,104 @@ import ( metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" kargoapi "github.com/akuity/kargo/api/v1alpha1" - "github.com/akuity/kargo/internal/controller" ) +func TestIndexStagesByAnalysisRun(t *testing.T) { + const testShardName = "test-shard" + t.Parallel() + testCases := []struct { + name string + controllerShardName string + stage *kargoapi.Stage + assertions func(*testing.T, []string) + }{ + { + name: "Stage belongs to another shard", + controllerShardName: testShardName, + stage: &kargoapi.Stage{ + ObjectMeta: metav1.ObjectMeta{ + Labels: map[string]string{ + kargoapi.ShardLabelKey: "another-shard", + }, + }, + }, + assertions: func(t *testing.T, res []string) { + require.Nil(t, res) + }, + }, + { + name: "Stage belongs to this shard", + controllerShardName: testShardName, + stage: &kargoapi.Stage{ + ObjectMeta: metav1.ObjectMeta{ + Labels: map[string]string{ + kargoapi.ShardLabelKey: testShardName, + }, + }, + Status: kargoapi.StageStatus{ + CurrentFreight: &kargoapi.SimpleFreight{ + VerificationInfo: &kargoapi.VerificationInfo{ + AnalysisRun: kargoapi.AnalysisRunReference{ + Namespace: "fake-namespace", + Name: "fake-analysis-run", + }, + }, + }, + }, + }, + assertions: func(t *testing.T, res []string) { + require.Equal( + t, + []string{ + "fake-namespace:fake-analysis-run", + }, + res, + ) + }, + }, + { + name: "Stage is unlabeled and this is not the default controller", + controllerShardName: testShardName, + stage: &kargoapi.Stage{}, + assertions: func(t *testing.T, res []string) { + require.Nil(t, res) + }, + }, 
+ { + name: "Stage is unlabeled and this is the default controller", + controllerShardName: "", + stage: &kargoapi.Stage{ + Status: kargoapi.StageStatus{ + CurrentFreight: &kargoapi.SimpleFreight{ + VerificationInfo: &kargoapi.VerificationInfo{ + AnalysisRun: kargoapi.AnalysisRunReference{ + Namespace: "fake-namespace", + Name: "fake-analysis-run", + }, + }, + }, + }, + }, + assertions: func(t *testing.T, res []string) { + require.Equal( + t, + []string{ + "fake-namespace:fake-analysis-run", + }, + res, + ) + }, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(t *testing.T) { + res := indexStagesByAnalysisRun(tc.controllerShardName)(tc.stage) + tc.assertions(t, res) + }) + } +} + func TestIndexStagesByApp(t *testing.T) { const testShardName = "test-shard" t.Parallel() @@ -25,7 +120,7 @@ func TestIndexStagesByApp(t *testing.T) { stage: &kargoapi.Stage{ ObjectMeta: metav1.ObjectMeta{ Labels: map[string]string{ - controller.ShardLabelKey: "another-shard", + kargoapi.ShardLabelKey: "another-shard", }, }, Spec: &kargoapi.StageSpec{ @@ -49,7 +144,7 @@ func TestIndexStagesByApp(t *testing.T) { stage: &kargoapi.Stage{ ObjectMeta: metav1.ObjectMeta{ Labels: map[string]string{ - controller.ShardLabelKey: testShardName, + kargoapi.ShardLabelKey: testShardName, }, }, Spec: &kargoapi.StageSpec{ diff --git a/pkg/api/v1alpha1/types.pb.go b/pkg/api/v1alpha1/types.pb.go index ec277c068..fd56476d4 100644 --- a/pkg/api/v1alpha1/types.pb.go +++ b/pkg/api/v1alpha1/types.pb.go @@ -2162,6 +2162,7 @@ type StageSpec struct { Subscriptions *Subscriptions `protobuf:"bytes,1,opt,name=subscriptions,proto3" json:"subscriptions,omitempty"` PromotionMechanisms *PromotionMechanisms `protobuf:"bytes,2,opt,name=promotion_mechanisms,json=promotionMechanisms,proto3" json:"promotion_mechanisms,omitempty"` + Verification *Verification `protobuf:"bytes,3,opt,name=verification,proto3,oneof" json:"verification,omitempty"` } func (x *StageSpec) Reset() { @@ -2210,6 +2211,13 @@ func 
(x *StageSpec) GetPromotionMechanisms() *PromotionMechanisms { return nil } +func (x *StageSpec) GetVerification() *Verification { + if x != nil { + return x.Verification + } + return nil +} + type Freight struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -2449,11 +2457,12 @@ type SimpleFreight struct { sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` - FirstSeen *timestamppb.Timestamp `protobuf:"bytes,2,opt,name=first_seen,json=firstSeen,proto3,oneof" json:"first_seen,omitempty"` - Commits []*GitCommit `protobuf:"bytes,4,rep,name=commits,proto3" json:"commits,omitempty"` - Images []*Image `protobuf:"bytes,5,rep,name=images,proto3" json:"images,omitempty"` - Charts []*Chart `protobuf:"bytes,6,rep,name=charts,proto3" json:"charts,omitempty"` + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + FirstSeen *timestamppb.Timestamp `protobuf:"bytes,2,opt,name=first_seen,json=firstSeen,proto3,oneof" json:"first_seen,omitempty"` + Commits []*GitCommit `protobuf:"bytes,4,rep,name=commits,proto3" json:"commits,omitempty"` + Images []*Image `protobuf:"bytes,5,rep,name=images,proto3" json:"images,omitempty"` + Charts []*Chart `protobuf:"bytes,6,rep,name=charts,proto3" json:"charts,omitempty"` + VerificationInfo *VerificationInfo `protobuf:"bytes,7,opt,name=verification_info,json=verificationInfo,proto3,oneof" json:"verification_info,omitempty"` } func (x *SimpleFreight) Reset() { @@ -2523,6 +2532,13 @@ func (x *SimpleFreight) GetCharts() []*Chart { return nil } +func (x *SimpleFreight) GetVerificationInfo() *VerificationInfo { + if x != nil { + return x.VerificationInfo + } + return nil +} + type StageStatus struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -2533,6 +2549,7 @@ type StageStatus struct { Error string `protobuf:"bytes,4,opt,name=error,proto3" json:"error,omitempty"` Health *Health 
`protobuf:"bytes,5,opt,name=health,proto3,oneof" json:"health,omitempty"` CurrentPromotion *PromotionInfo `protobuf:"bytes,6,opt,name=current_promotion,json=currentPromotion,proto3,oneof" json:"current_promotion,omitempty"` + Phase string `protobuf:"bytes,7,opt,name=phase,proto3" json:"phase,omitempty"` } func (x *StageStatus) Reset() { @@ -2602,6 +2619,13 @@ func (x *StageStatus) GetCurrentPromotion() *PromotionInfo { return nil } +func (x *StageStatus) GetPhase() string { + if x != nil { + return x.Phase + } + return "" +} + type StageSubscription struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -2885,6 +2909,336 @@ func (x *WarehouseStatus) GetObservedGeneration() int64 { return 0 } +type Verification struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + AnalysisTemplates []*AnalysisTemplateReference `protobuf:"bytes,1,rep,name=analysis_templates,json=analysisTemplates,proto3" json:"analysis_templates,omitempty"` + AnalysisRunMetadata *AnalysisRunMetadata `protobuf:"bytes,2,opt,name=analysis_run_metadata,json=analysisRunMetadata,proto3,oneof" json:"analysis_run_metadata,omitempty"` + Args []*AnalysisRunArgument `protobuf:"bytes,3,rep,name=args,proto3" json:"args,omitempty"` +} + +func (x *Verification) Reset() { + *x = Verification{} + if protoimpl.UnsafeEnabled { + mi := &file_v1alpha1_types_proto_msgTypes[46] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Verification) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Verification) ProtoMessage() {} + +func (x *Verification) ProtoReflect() protoreflect.Message { + mi := &file_v1alpha1_types_proto_msgTypes[46] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use 
Verification.ProtoReflect.Descriptor instead. +func (*Verification) Descriptor() ([]byte, []int) { + return file_v1alpha1_types_proto_rawDescGZIP(), []int{46} +} + +func (x *Verification) GetAnalysisTemplates() []*AnalysisTemplateReference { + if x != nil { + return x.AnalysisTemplates + } + return nil +} + +func (x *Verification) GetAnalysisRunMetadata() *AnalysisRunMetadata { + if x != nil { + return x.AnalysisRunMetadata + } + return nil +} + +func (x *Verification) GetArgs() []*AnalysisRunArgument { + if x != nil { + return x.Args + } + return nil +} + +type AnalysisTemplateReference struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` +} + +func (x *AnalysisTemplateReference) Reset() { + *x = AnalysisTemplateReference{} + if protoimpl.UnsafeEnabled { + mi := &file_v1alpha1_types_proto_msgTypes[47] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *AnalysisTemplateReference) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*AnalysisTemplateReference) ProtoMessage() {} + +func (x *AnalysisTemplateReference) ProtoReflect() protoreflect.Message { + mi := &file_v1alpha1_types_proto_msgTypes[47] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use AnalysisTemplateReference.ProtoReflect.Descriptor instead. 
+func (*AnalysisTemplateReference) Descriptor() ([]byte, []int) { + return file_v1alpha1_types_proto_rawDescGZIP(), []int{47} +} + +func (x *AnalysisTemplateReference) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +type AnalysisRunMetadata struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Labels map[string]string `protobuf:"bytes,1,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + Annotations map[string]string `protobuf:"bytes,2,rep,name=annotations,proto3" json:"annotations,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` +} + +func (x *AnalysisRunMetadata) Reset() { + *x = AnalysisRunMetadata{} + if protoimpl.UnsafeEnabled { + mi := &file_v1alpha1_types_proto_msgTypes[48] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *AnalysisRunMetadata) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*AnalysisRunMetadata) ProtoMessage() {} + +func (x *AnalysisRunMetadata) ProtoReflect() protoreflect.Message { + mi := &file_v1alpha1_types_proto_msgTypes[48] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use AnalysisRunMetadata.ProtoReflect.Descriptor instead. 
+func (*AnalysisRunMetadata) Descriptor() ([]byte, []int) { + return file_v1alpha1_types_proto_rawDescGZIP(), []int{48} +} + +func (x *AnalysisRunMetadata) GetLabels() map[string]string { + if x != nil { + return x.Labels + } + return nil +} + +func (x *AnalysisRunMetadata) GetAnnotations() map[string]string { + if x != nil { + return x.Annotations + } + return nil +} + +type AnalysisRunArgument struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + Value string `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` +} + +func (x *AnalysisRunArgument) Reset() { + *x = AnalysisRunArgument{} + if protoimpl.UnsafeEnabled { + mi := &file_v1alpha1_types_proto_msgTypes[49] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *AnalysisRunArgument) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*AnalysisRunArgument) ProtoMessage() {} + +func (x *AnalysisRunArgument) ProtoReflect() protoreflect.Message { + mi := &file_v1alpha1_types_proto_msgTypes[49] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use AnalysisRunArgument.ProtoReflect.Descriptor instead. 
+func (*AnalysisRunArgument) Descriptor() ([]byte, []int) { + return file_v1alpha1_types_proto_rawDescGZIP(), []int{49} +} + +func (x *AnalysisRunArgument) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *AnalysisRunArgument) GetValue() string { + if x != nil { + return x.Value + } + return "" +} + +type VerificationInfo struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + AnalysisRun *AnalysisRunReference `protobuf:"bytes,1,opt,name=analysis_run,json=analysisRun,proto3" json:"analysis_run,omitempty"` +} + +func (x *VerificationInfo) Reset() { + *x = VerificationInfo{} + if protoimpl.UnsafeEnabled { + mi := &file_v1alpha1_types_proto_msgTypes[50] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *VerificationInfo) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*VerificationInfo) ProtoMessage() {} + +func (x *VerificationInfo) ProtoReflect() protoreflect.Message { + mi := &file_v1alpha1_types_proto_msgTypes[50] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use VerificationInfo.ProtoReflect.Descriptor instead. 
+func (*VerificationInfo) Descriptor() ([]byte, []int) { + return file_v1alpha1_types_proto_rawDescGZIP(), []int{50} +} + +func (x *VerificationInfo) GetAnalysisRun() *AnalysisRunReference { + if x != nil { + return x.AnalysisRun + } + return nil +} + +type AnalysisRunReference struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Namespace string `protobuf:"bytes,1,opt,name=namespace,proto3" json:"namespace,omitempty"` + Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` + Phase string `protobuf:"bytes,3,opt,name=phase,proto3" json:"phase,omitempty"` +} + +func (x *AnalysisRunReference) Reset() { + *x = AnalysisRunReference{} + if protoimpl.UnsafeEnabled { + mi := &file_v1alpha1_types_proto_msgTypes[51] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *AnalysisRunReference) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*AnalysisRunReference) ProtoMessage() {} + +func (x *AnalysisRunReference) ProtoReflect() protoreflect.Message { + mi := &file_v1alpha1_types_proto_msgTypes[51] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use AnalysisRunReference.ProtoReflect.Descriptor instead. 
+func (*AnalysisRunReference) Descriptor() ([]byte, []int) { + return file_v1alpha1_types_proto_rawDescGZIP(), []int{51} +} + +func (x *AnalysisRunReference) GetNamespace() string { + if x != nil { + return x.Namespace + } + return "" +} + +func (x *AnalysisRunReference) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *AnalysisRunReference) GetPhase() string { + if x != nil { + return x.Phase + } + return "" +} + var File_v1alpha1_types_proto protoreflect.FileDescriptor var file_v1alpha1_types_proto_rawDesc = []byte{ @@ -3257,8 +3611,8 @@ var file_v1alpha1_types_proto_rawDesc = []byte{ 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x6b, 0x75, 0x69, 0x74, 0x79, 0x2e, 0x6b, 0x61, 0x72, 0x67, 0x6f, 0x2e, 0x70, 0x6b, 0x67, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, - 0x31, 0x2e, 0x53, 0x74, 0x61, 0x67, 0x65, 0x52, 0x05, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x22, 0xdc, - 0x01, 0x0a, 0x09, 0x53, 0x74, 0x61, 0x67, 0x65, 0x53, 0x70, 0x65, 0x63, 0x12, 0x5d, 0x0a, 0x0d, + 0x31, 0x2e, 0x53, 0x74, 0x61, 0x67, 0x65, 0x52, 0x05, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x22, 0xce, + 0x02, 0x0a, 0x09, 0x53, 0x74, 0x61, 0x67, 0x65, 0x53, 0x70, 0x65, 0x63, 0x12, 0x5d, 0x0a, 0x0d, 0x73, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x37, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x6b, 0x75, 0x69, 0x74, 0x79, 0x2e, 0x6b, 0x61, 0x72, 0x67, 0x6f, 0x2e, 0x70, 0x6b, @@ -3271,182 +3625,264 @@ var file_v1alpha1_types_proto_rawDesc = []byte{ 0x72, 0x67, 0x6f, 0x2e, 0x70, 0x6b, 0x67, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x50, 0x72, 0x6f, 0x6d, 0x6f, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x65, 0x63, 0x68, 0x61, 0x6e, 0x69, 0x73, 0x6d, 0x73, 0x52, 0x13, 0x70, 0x72, 0x6f, 0x6d, 0x6f, 0x74, - 0x69, 0x6f, 0x6e, 0x4d, 0x65, 0x63, 0x68, 
0x61, 0x6e, 0x69, 0x73, 0x6d, 0x73, 0x22, 0xd0, 0x03, - 0x0a, 0x07, 0x46, 0x72, 0x65, 0x69, 0x67, 0x68, 0x74, 0x12, 0x1f, 0x0a, 0x0b, 0x61, 0x70, 0x69, - 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, - 0x61, 0x70, 0x69, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x12, 0x0a, 0x04, 0x6b, 0x69, - 0x6e, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6b, 0x69, 0x6e, 0x64, 0x12, 0x4e, - 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, - 0x32, 0x32, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x6b, - 0x75, 0x69, 0x74, 0x79, 0x2e, 0x6b, 0x61, 0x72, 0x67, 0x6f, 0x2e, 0x70, 0x6b, 0x67, 0x2e, 0x61, - 0x70, 0x69, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x76, 0x31, 0x2e, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, - 0x4d, 0x65, 0x74, 0x61, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x0e, - 0x0a, 0x02, 0x69, 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x4d, - 0x0a, 0x07, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, - 0x33, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x6b, 0x75, + 0x69, 0x6f, 0x6e, 0x4d, 0x65, 0x63, 0x68, 0x61, 0x6e, 0x69, 0x73, 0x6d, 0x73, 0x12, 0x5f, 0x0a, + 0x0c, 0x76, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x36, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, + 0x2e, 0x61, 0x6b, 0x75, 0x69, 0x74, 0x79, 0x2e, 0x6b, 0x61, 0x72, 0x67, 0x6f, 0x2e, 0x70, 0x6b, + 0x67, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x56, + 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x48, 0x00, 0x52, 0x0c, 0x76, + 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x88, 0x01, 0x01, 0x42, 0x0f, + 0x0a, 0x0d, 0x5f, 0x76, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 
0x6f, 0x6e, 0x22, + 0xd0, 0x03, 0x0a, 0x07, 0x46, 0x72, 0x65, 0x69, 0x67, 0x68, 0x74, 0x12, 0x1f, 0x0a, 0x0b, 0x61, + 0x70, 0x69, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x0a, 0x61, 0x70, 0x69, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x12, 0x0a, 0x04, + 0x6b, 0x69, 0x6e, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6b, 0x69, 0x6e, 0x64, + 0x12, 0x4e, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x03, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x32, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2e, + 0x61, 0x6b, 0x75, 0x69, 0x74, 0x79, 0x2e, 0x6b, 0x61, 0x72, 0x67, 0x6f, 0x2e, 0x70, 0x6b, 0x67, + 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x76, 0x31, 0x2e, 0x4f, 0x62, 0x6a, 0x65, + 0x63, 0x74, 0x4d, 0x65, 0x74, 0x61, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, + 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, + 0x12, 0x4d, 0x0a, 0x07, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, + 0x0b, 0x32, 0x33, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, + 0x6b, 0x75, 0x69, 0x74, 0x79, 0x2e, 0x6b, 0x61, 0x72, 0x67, 0x6f, 0x2e, 0x70, 0x6b, 0x67, 0x2e, + 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x47, 0x69, 0x74, + 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x52, 0x07, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x73, 0x12, + 0x47, 0x0a, 0x06, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, + 0x2f, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x6b, 0x75, 0x69, 0x74, 0x79, 0x2e, 0x6b, 0x61, 0x72, 0x67, 0x6f, 0x2e, 0x70, 0x6b, 0x67, 0x2e, 0x61, 0x70, - 0x69, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x47, 0x69, 0x74, 0x43, 0x6f, - 0x6d, 0x6d, 0x69, 0x74, 0x52, 0x07, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x73, 0x12, 0x47, 0x0a, - 0x06, 0x69, 0x6d, 
0x61, 0x67, 0x65, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2f, 0x2e, - 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x6b, 0x75, 0x69, 0x74, - 0x79, 0x2e, 0x6b, 0x61, 0x72, 0x67, 0x6f, 0x2e, 0x70, 0x6b, 0x67, 0x2e, 0x61, 0x70, 0x69, 0x2e, - 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x49, 0x6d, 0x61, 0x67, 0x65, 0x52, 0x06, - 0x69, 0x6d, 0x61, 0x67, 0x65, 0x73, 0x12, 0x47, 0x0a, 0x06, 0x63, 0x68, 0x61, 0x72, 0x74, 0x73, - 0x18, 0x07, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, - 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x6b, 0x75, 0x69, 0x74, 0x79, 0x2e, 0x6b, 0x61, 0x72, 0x67, 0x6f, - 0x2e, 0x70, 0x6b, 0x67, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, - 0x31, 0x2e, 0x43, 0x68, 0x61, 0x72, 0x74, 0x52, 0x06, 0x63, 0x68, 0x61, 0x72, 0x74, 0x73, 0x12, - 0x4f, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x69, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x49, 0x6d, 0x61, 0x67, 0x65, + 0x52, 0x06, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x73, 0x12, 0x47, 0x0a, 0x06, 0x63, 0x68, 0x61, 0x72, + 0x74, 0x73, 0x18, 0x07, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, + 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x6b, 0x75, 0x69, 0x74, 0x79, 0x2e, 0x6b, 0x61, 0x72, + 0x67, 0x6f, 0x2e, 0x70, 0x6b, 0x67, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, + 0x68, 0x61, 0x31, 0x2e, 0x43, 0x68, 0x61, 0x72, 0x74, 0x52, 0x06, 0x63, 0x68, 0x61, 0x72, 0x74, + 0x73, 0x12, 0x4f, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x08, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x37, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, + 0x6b, 0x75, 0x69, 0x74, 0x79, 0x2e, 0x6b, 0x61, 0x72, 0x67, 0x6f, 0x2e, 0x70, 0x6b, 0x67, 0x2e, + 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x46, 0x72, 0x65, + 0x69, 0x67, 0x68, 0x74, 0x53, 0x74, 0x61, 0x74, 0x75, 
0x73, 0x52, 0x06, 0x73, 0x74, 0x61, 0x74, + 0x75, 0x73, 0x22, 0xd7, 0x03, 0x0a, 0x0d, 0x46, 0x72, 0x65, 0x69, 0x67, 0x68, 0x74, 0x53, 0x74, + 0x61, 0x74, 0x75, 0x73, 0x12, 0x68, 0x0a, 0x0b, 0x76, 0x65, 0x72, 0x69, 0x66, 0x69, 0x65, 0x64, + 0x5f, 0x69, 0x6e, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x47, 0x2e, 0x67, 0x69, 0x74, 0x68, + 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x6b, 0x75, 0x69, 0x74, 0x79, 0x2e, 0x6b, 0x61, + 0x72, 0x67, 0x6f, 0x2e, 0x70, 0x6b, 0x67, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x61, 0x6c, + 0x70, 0x68, 0x61, 0x31, 0x2e, 0x46, 0x72, 0x65, 0x69, 0x67, 0x68, 0x74, 0x53, 0x74, 0x61, 0x74, + 0x75, 0x73, 0x2e, 0x56, 0x65, 0x72, 0x69, 0x66, 0x69, 0x65, 0x64, 0x49, 0x6e, 0x45, 0x6e, 0x74, + 0x72, 0x79, 0x52, 0x0a, 0x76, 0x65, 0x72, 0x69, 0x66, 0x69, 0x65, 0x64, 0x49, 0x6e, 0x12, 0x6b, + 0x0a, 0x0c, 0x61, 0x70, 0x70, 0x72, 0x6f, 0x76, 0x65, 0x64, 0x5f, 0x66, 0x6f, 0x72, 0x18, 0x02, + 0x20, 0x03, 0x28, 0x0b, 0x32, 0x48, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, + 0x6d, 0x2e, 0x61, 0x6b, 0x75, 0x69, 0x74, 0x79, 0x2e, 0x6b, 0x61, 0x72, 0x67, 0x6f, 0x2e, 0x70, + 0x6b, 0x67, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, + 0x46, 0x72, 0x65, 0x69, 0x67, 0x68, 0x74, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x2e, 0x41, 0x70, + 0x70, 0x72, 0x6f, 0x76, 0x65, 0x64, 0x46, 0x6f, 0x72, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0b, + 0x61, 0x70, 0x70, 0x72, 0x6f, 0x76, 0x65, 0x64, 0x46, 0x6f, 0x72, 0x1a, 0x76, 0x0a, 0x0f, 0x56, + 0x65, 0x72, 0x69, 0x66, 0x69, 0x65, 0x64, 0x49, 0x6e, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, + 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, + 0x12, 0x4d, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x37, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x6b, 0x75, 0x69, 0x74, 0x79, 0x2e, 0x6b, 0x61, 0x72, 0x67, 0x6f, 0x2e, 0x70, 0x6b, 0x67, 0x2e, 0x61, 0x70, - 
0x69, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x46, 0x72, 0x65, 0x69, 0x67, - 0x68, 0x74, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, - 0x22, 0xd7, 0x03, 0x0a, 0x0d, 0x46, 0x72, 0x65, 0x69, 0x67, 0x68, 0x74, 0x53, 0x74, 0x61, 0x74, - 0x75, 0x73, 0x12, 0x68, 0x0a, 0x0b, 0x76, 0x65, 0x72, 0x69, 0x66, 0x69, 0x65, 0x64, 0x5f, 0x69, - 0x6e, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x47, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, - 0x2e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x6b, 0x75, 0x69, 0x74, 0x79, 0x2e, 0x6b, 0x61, 0x72, 0x67, - 0x6f, 0x2e, 0x70, 0x6b, 0x67, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, - 0x61, 0x31, 0x2e, 0x46, 0x72, 0x65, 0x69, 0x67, 0x68, 0x74, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, - 0x2e, 0x56, 0x65, 0x72, 0x69, 0x66, 0x69, 0x65, 0x64, 0x49, 0x6e, 0x45, 0x6e, 0x74, 0x72, 0x79, - 0x52, 0x0a, 0x76, 0x65, 0x72, 0x69, 0x66, 0x69, 0x65, 0x64, 0x49, 0x6e, 0x12, 0x6b, 0x0a, 0x0c, - 0x61, 0x70, 0x70, 0x72, 0x6f, 0x76, 0x65, 0x64, 0x5f, 0x66, 0x6f, 0x72, 0x18, 0x02, 0x20, 0x03, - 0x28, 0x0b, 0x32, 0x48, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2e, - 0x61, 0x6b, 0x75, 0x69, 0x74, 0x79, 0x2e, 0x6b, 0x61, 0x72, 0x67, 0x6f, 0x2e, 0x70, 0x6b, 0x67, - 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x46, 0x72, - 0x65, 0x69, 0x67, 0x68, 0x74, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x2e, 0x41, 0x70, 0x70, 0x72, - 0x6f, 0x76, 0x65, 0x64, 0x46, 0x6f, 0x72, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0b, 0x61, 0x70, - 0x70, 0x72, 0x6f, 0x76, 0x65, 0x64, 0x46, 0x6f, 0x72, 0x1a, 0x76, 0x0a, 0x0f, 0x56, 0x65, 0x72, - 0x69, 0x66, 0x69, 0x65, 0x64, 0x49, 0x6e, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, - 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x4d, - 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x37, 0x2e, - 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 
0x2e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x6b, 0x75, 0x69, 0x74, - 0x79, 0x2e, 0x6b, 0x61, 0x72, 0x67, 0x6f, 0x2e, 0x70, 0x6b, 0x67, 0x2e, 0x61, 0x70, 0x69, 0x2e, - 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x56, 0x65, 0x72, 0x69, 0x66, 0x69, 0x65, - 0x64, 0x53, 0x74, 0x61, 0x67, 0x65, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, - 0x01, 0x1a, 0x77, 0x0a, 0x10, 0x41, 0x70, 0x70, 0x72, 0x6f, 0x76, 0x65, 0x64, 0x46, 0x6f, 0x72, - 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x4d, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x37, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, - 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x6b, 0x75, 0x69, 0x74, 0x79, 0x2e, 0x6b, 0x61, 0x72, 0x67, 0x6f, - 0x2e, 0x70, 0x6b, 0x67, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, - 0x31, 0x2e, 0x41, 0x70, 0x70, 0x72, 0x6f, 0x76, 0x65, 0x64, 0x53, 0x74, 0x61, 0x67, 0x65, 0x52, - 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x0f, 0x0a, 0x0d, 0x56, 0x65, - 0x72, 0x69, 0x66, 0x69, 0x65, 0x64, 0x53, 0x74, 0x61, 0x67, 0x65, 0x22, 0x0f, 0x0a, 0x0d, 0x41, - 0x70, 0x70, 0x72, 0x6f, 0x76, 0x65, 0x64, 0x53, 0x74, 0x61, 0x67, 0x65, 0x22, 0xcf, 0x02, 0x0a, - 0x0d, 0x53, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x46, 0x72, 0x65, 0x69, 0x67, 0x68, 0x74, 0x12, 0x0e, - 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x3e, - 0x0a, 0x0a, 0x66, 0x69, 0x72, 0x73, 0x74, 0x5f, 0x73, 0x65, 0x65, 0x6e, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x48, 0x00, - 0x52, 0x09, 0x66, 0x69, 0x72, 0x73, 0x74, 0x53, 0x65, 0x65, 0x6e, 0x88, 0x01, 0x01, 0x12, 0x4d, - 0x0a, 0x07, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x73, 0x18, 0x04, 0x20, 
0x03, 0x28, 0x0b, 0x32, - 0x33, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x6b, 0x75, + 0x69, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x56, 0x65, 0x72, 0x69, 0x66, + 0x69, 0x65, 0x64, 0x53, 0x74, 0x61, 0x67, 0x65, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, + 0x02, 0x38, 0x01, 0x1a, 0x77, 0x0a, 0x10, 0x41, 0x70, 0x70, 0x72, 0x6f, 0x76, 0x65, 0x64, 0x46, + 0x6f, 0x72, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x4d, 0x0a, 0x05, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x37, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, + 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x6b, 0x75, 0x69, 0x74, 0x79, 0x2e, 0x6b, 0x61, 0x72, + 0x67, 0x6f, 0x2e, 0x70, 0x6b, 0x67, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, + 0x68, 0x61, 0x31, 0x2e, 0x41, 0x70, 0x70, 0x72, 0x6f, 0x76, 0x65, 0x64, 0x53, 0x74, 0x61, 0x67, + 0x65, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x0f, 0x0a, 0x0d, + 0x56, 0x65, 0x72, 0x69, 0x66, 0x69, 0x65, 0x64, 0x53, 0x74, 0x61, 0x67, 0x65, 0x22, 0x0f, 0x0a, + 0x0d, 0x41, 0x70, 0x70, 0x72, 0x6f, 0x76, 0x65, 0x64, 0x53, 0x74, 0x61, 0x67, 0x65, 0x22, 0xd3, + 0x03, 0x0a, 0x0d, 0x53, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x46, 0x72, 0x65, 0x69, 0x67, 0x68, 0x74, + 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, + 0x12, 0x3e, 0x0a, 0x0a, 0x66, 0x69, 0x72, 0x73, 0x74, 0x5f, 0x73, 0x65, 0x65, 0x6e, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, + 0x48, 0x00, 0x52, 0x09, 0x66, 0x69, 0x72, 0x73, 0x74, 0x53, 0x65, 0x65, 0x6e, 0x88, 0x01, 0x01, + 0x12, 0x4d, 0x0a, 0x07, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, + 0x0b, 0x32, 
0x33, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, + 0x6b, 0x75, 0x69, 0x74, 0x79, 0x2e, 0x6b, 0x61, 0x72, 0x67, 0x6f, 0x2e, 0x70, 0x6b, 0x67, 0x2e, + 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x47, 0x69, 0x74, + 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x52, 0x07, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x73, 0x12, + 0x47, 0x0a, 0x06, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, + 0x2f, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x6b, 0x75, 0x69, 0x74, 0x79, 0x2e, 0x6b, 0x61, 0x72, 0x67, 0x6f, 0x2e, 0x70, 0x6b, 0x67, 0x2e, 0x61, 0x70, - 0x69, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x47, 0x69, 0x74, 0x43, 0x6f, - 0x6d, 0x6d, 0x69, 0x74, 0x52, 0x07, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x73, 0x12, 0x47, 0x0a, - 0x06, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2f, 0x2e, + 0x69, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x49, 0x6d, 0x61, 0x67, 0x65, + 0x52, 0x06, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x73, 0x12, 0x47, 0x0a, 0x06, 0x63, 0x68, 0x61, 0x72, + 0x74, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, + 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x6b, 0x75, 0x69, 0x74, 0x79, 0x2e, 0x6b, 0x61, 0x72, + 0x67, 0x6f, 0x2e, 0x70, 0x6b, 0x67, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, + 0x68, 0x61, 0x31, 0x2e, 0x43, 0x68, 0x61, 0x72, 0x74, 0x52, 0x06, 0x63, 0x68, 0x61, 0x72, 0x74, + 0x73, 0x12, 0x6c, 0x0a, 0x11, 0x76, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x5f, 0x69, 0x6e, 0x66, 0x6f, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x3a, 0x2e, 0x67, + 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x6b, 0x75, 0x69, 0x74, 0x79, + 0x2e, 0x6b, 0x61, 0x72, 0x67, 0x6f, 0x2e, 0x70, 0x6b, 0x67, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, + 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x56, 
0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, + 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x6e, 0x66, 0x6f, 0x48, 0x01, 0x52, 0x10, 0x76, 0x65, 0x72, 0x69, + 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x6e, 0x66, 0x6f, 0x88, 0x01, 0x01, 0x42, + 0x0d, 0x0a, 0x0b, 0x5f, 0x66, 0x69, 0x72, 0x73, 0x74, 0x5f, 0x73, 0x65, 0x65, 0x6e, 0x42, 0x14, + 0x0a, 0x12, 0x5f, 0x76, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, + 0x69, 0x6e, 0x66, 0x6f, 0x22, 0xe2, 0x03, 0x0a, 0x0b, 0x53, 0x74, 0x61, 0x67, 0x65, 0x53, 0x74, + 0x61, 0x74, 0x75, 0x73, 0x12, 0x65, 0x0a, 0x0f, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x5f, + 0x66, 0x72, 0x65, 0x69, 0x67, 0x68, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x37, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x6b, 0x75, 0x69, 0x74, 0x79, 0x2e, 0x6b, 0x61, 0x72, 0x67, 0x6f, 0x2e, 0x70, 0x6b, 0x67, 0x2e, 0x61, 0x70, 0x69, 0x2e, - 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x49, 0x6d, 0x61, 0x67, 0x65, 0x52, 0x06, - 0x69, 0x6d, 0x61, 0x67, 0x65, 0x73, 0x12, 0x47, 0x0a, 0x06, 0x63, 0x68, 0x61, 0x72, 0x74, 0x73, - 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, - 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x6b, 0x75, 0x69, 0x74, 0x79, 0x2e, 0x6b, 0x61, 0x72, 0x67, 0x6f, - 0x2e, 0x70, 0x6b, 0x67, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, - 0x31, 0x2e, 0x43, 0x68, 0x61, 0x72, 0x74, 0x52, 0x06, 0x63, 0x68, 0x61, 0x72, 0x74, 0x73, 0x42, - 0x0d, 0x0a, 0x0b, 0x5f, 0x66, 0x69, 0x72, 0x73, 0x74, 0x5f, 0x73, 0x65, 0x65, 0x6e, 0x22, 0xcc, - 0x03, 0x0a, 0x0b, 0x53, 0x74, 0x61, 0x67, 0x65, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x65, - 0x0a, 0x0f, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x66, 0x72, 0x65, 0x69, 0x67, 0x68, - 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x37, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, - 0x2e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x6b, 0x75, 0x69, 0x74, 0x79, 0x2e, 0x6b, 0x61, 0x72, 0x67, - 
0x6f, 0x2e, 0x70, 0x6b, 0x67, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, - 0x61, 0x31, 0x2e, 0x53, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x46, 0x72, 0x65, 0x69, 0x67, 0x68, 0x74, - 0x48, 0x00, 0x52, 0x0e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x46, 0x72, 0x65, 0x69, 0x67, - 0x68, 0x74, 0x88, 0x01, 0x01, 0x12, 0x51, 0x0a, 0x07, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, - 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x37, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, - 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x6b, 0x75, 0x69, 0x74, 0x79, 0x2e, 0x6b, 0x61, 0x72, 0x67, 0x6f, - 0x2e, 0x70, 0x6b, 0x67, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, - 0x31, 0x2e, 0x53, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x46, 0x72, 0x65, 0x69, 0x67, 0x68, 0x74, 0x52, - 0x07, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, - 0x72, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x4d, - 0x0a, 0x06, 0x68, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x30, + 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x46, + 0x72, 0x65, 0x69, 0x67, 0x68, 0x74, 0x48, 0x00, 0x52, 0x0e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, + 0x74, 0x46, 0x72, 0x65, 0x69, 0x67, 0x68, 0x74, 0x88, 0x01, 0x01, 0x12, 0x51, 0x0a, 0x07, 0x68, + 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x37, 0x2e, 0x67, + 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x6b, 0x75, 0x69, 0x74, 0x79, + 0x2e, 0x6b, 0x61, 0x72, 0x67, 0x6f, 0x2e, 0x70, 0x6b, 0x67, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, + 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x46, 0x72, + 0x65, 0x69, 0x67, 0x68, 0x74, 0x52, 0x07, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x12, 0x14, + 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x65, + 0x72, 0x72, 0x6f, 0x72, 0x12, 0x4d, 
0x0a, 0x06, 0x68, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x18, 0x05, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x30, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, + 0x6d, 0x2e, 0x61, 0x6b, 0x75, 0x69, 0x74, 0x79, 0x2e, 0x6b, 0x61, 0x72, 0x67, 0x6f, 0x2e, 0x70, + 0x6b, 0x67, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, + 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x48, 0x01, 0x52, 0x06, 0x68, 0x65, 0x61, 0x6c, 0x74, 0x68, + 0x88, 0x01, 0x01, 0x12, 0x69, 0x0a, 0x11, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x70, + 0x72, 0x6f, 0x6d, 0x6f, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x37, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x6b, 0x75, 0x69, 0x74, 0x79, 0x2e, 0x6b, 0x61, 0x72, 0x67, 0x6f, 0x2e, 0x70, 0x6b, 0x67, 0x2e, 0x61, 0x70, 0x69, - 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, - 0x48, 0x01, 0x52, 0x06, 0x68, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x88, 0x01, 0x01, 0x12, 0x69, 0x0a, - 0x11, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x70, 0x72, 0x6f, 0x6d, 0x6f, 0x74, 0x69, - 0x6f, 0x6e, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x37, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, - 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x6b, 0x75, 0x69, 0x74, 0x79, 0x2e, 0x6b, 0x61, 0x72, - 0x67, 0x6f, 0x2e, 0x70, 0x6b, 0x67, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, - 0x68, 0x61, 0x31, 0x2e, 0x50, 0x72, 0x6f, 0x6d, 0x6f, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x6e, 0x66, - 0x6f, 0x48, 0x02, 0x52, 0x10, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x50, 0x72, 0x6f, 0x6d, - 0x6f, 0x74, 0x69, 0x6f, 0x6e, 0x88, 0x01, 0x01, 0x42, 0x12, 0x0a, 0x10, 0x5f, 0x63, 0x75, 0x72, - 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x66, 0x72, 0x65, 0x69, 0x67, 0x68, 0x74, 0x42, 0x09, 0x0a, 0x07, - 0x5f, 0x68, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x42, 0x14, 0x0a, 0x12, 0x5f, 0x63, 0x75, 0x72, 0x72, - 0x65, 0x6e, 0x74, 0x5f, 0x70, 0x72, 0x6f, 0x6d, 0x6f, 0x74, 0x69, 0x6f, 0x6e, 
0x22, 0x27, 0x0a, - 0x11, 0x53, 0x74, 0x61, 0x67, 0x65, 0x53, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, - 0x6f, 0x6e, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x93, 0x01, 0x0a, 0x0d, 0x53, 0x75, 0x62, 0x73, 0x63, - 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x64, 0x0a, 0x0f, 0x75, 0x70, 0x73, 0x74, - 0x72, 0x65, 0x61, 0x6d, 0x5f, 0x73, 0x74, 0x61, 0x67, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, - 0x0b, 0x32, 0x3b, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, - 0x6b, 0x75, 0x69, 0x74, 0x79, 0x2e, 0x6b, 0x61, 0x72, 0x67, 0x6f, 0x2e, 0x70, 0x6b, 0x67, 0x2e, - 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 0x74, 0x61, - 0x67, 0x65, 0x53, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x0e, - 0x75, 0x70, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x53, 0x74, 0x61, 0x67, 0x65, 0x73, 0x12, 0x1c, - 0x0a, 0x09, 0x77, 0x61, 0x72, 0x65, 0x68, 0x6f, 0x75, 0x73, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x09, 0x77, 0x61, 0x72, 0x65, 0x68, 0x6f, 0x75, 0x73, 0x65, 0x22, 0xb0, 0x02, 0x0a, - 0x09, 0x57, 0x61, 0x72, 0x65, 0x68, 0x6f, 0x75, 0x73, 0x65, 0x12, 0x1f, 0x0a, 0x0b, 0x61, 0x70, - 0x69, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x0a, 0x61, 0x70, 0x69, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x12, 0x0a, 0x04, 0x6b, - 0x69, 0x6e, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6b, 0x69, 0x6e, 0x64, 0x12, - 0x4e, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x03, 0x20, 0x01, 0x28, - 0x0b, 0x32, 0x32, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, - 0x6b, 0x75, 0x69, 0x74, 0x79, 0x2e, 0x6b, 0x61, 0x72, 0x67, 0x6f, 0x2e, 0x70, 0x6b, 0x67, 0x2e, - 0x61, 0x70, 0x69, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x76, 0x31, 0x2e, 0x4f, 0x62, 0x6a, 0x65, 0x63, - 0x74, 0x4d, 0x65, 
0x74, 0x61, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, - 0x4b, 0x0a, 0x04, 0x73, 0x70, 0x65, 0x63, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x37, 0x2e, + 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x50, 0x72, 0x6f, 0x6d, 0x6f, 0x74, + 0x69, 0x6f, 0x6e, 0x49, 0x6e, 0x66, 0x6f, 0x48, 0x02, 0x52, 0x10, 0x63, 0x75, 0x72, 0x72, 0x65, + 0x6e, 0x74, 0x50, 0x72, 0x6f, 0x6d, 0x6f, 0x74, 0x69, 0x6f, 0x6e, 0x88, 0x01, 0x01, 0x12, 0x14, + 0x0a, 0x05, 0x70, 0x68, 0x61, 0x73, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x70, + 0x68, 0x61, 0x73, 0x65, 0x42, 0x12, 0x0a, 0x10, 0x5f, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, + 0x5f, 0x66, 0x72, 0x65, 0x69, 0x67, 0x68, 0x74, 0x42, 0x09, 0x0a, 0x07, 0x5f, 0x68, 0x65, 0x61, + 0x6c, 0x74, 0x68, 0x42, 0x14, 0x0a, 0x12, 0x5f, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x5f, + 0x70, 0x72, 0x6f, 0x6d, 0x6f, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x27, 0x0a, 0x11, 0x53, 0x74, 0x61, + 0x67, 0x65, 0x53, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x12, + 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, + 0x6d, 0x65, 0x22, 0x93, 0x01, 0x0a, 0x0d, 0x53, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, + 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x64, 0x0a, 0x0f, 0x75, 0x70, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, + 0x5f, 0x73, 0x74, 0x61, 0x67, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x3b, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x6b, 0x75, 0x69, 0x74, 0x79, 0x2e, 0x6b, 0x61, 0x72, 0x67, 0x6f, 0x2e, 0x70, 0x6b, 0x67, 0x2e, 0x61, 0x70, 0x69, 0x2e, - 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x57, 0x61, 0x72, 0x65, 0x68, 0x6f, 0x75, - 0x73, 0x65, 0x53, 0x70, 0x65, 0x63, 0x52, 0x04, 0x73, 0x70, 0x65, 0x63, 0x12, 0x51, 0x0a, 0x06, - 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x39, 0x2e, 0x67, + 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x53, 
0x74, 0x61, 0x67, 0x65, 0x53, 0x75, + 0x62, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x0e, 0x75, 0x70, 0x73, 0x74, + 0x72, 0x65, 0x61, 0x6d, 0x53, 0x74, 0x61, 0x67, 0x65, 0x73, 0x12, 0x1c, 0x0a, 0x09, 0x77, 0x61, + 0x72, 0x65, 0x68, 0x6f, 0x75, 0x73, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x77, + 0x61, 0x72, 0x65, 0x68, 0x6f, 0x75, 0x73, 0x65, 0x22, 0xb0, 0x02, 0x0a, 0x09, 0x57, 0x61, 0x72, + 0x65, 0x68, 0x6f, 0x75, 0x73, 0x65, 0x12, 0x1f, 0x0a, 0x0b, 0x61, 0x70, 0x69, 0x5f, 0x76, 0x65, + 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x61, 0x70, 0x69, + 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x12, 0x0a, 0x04, 0x6b, 0x69, 0x6e, 0x64, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6b, 0x69, 0x6e, 0x64, 0x12, 0x4e, 0x0a, 0x08, 0x6d, + 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x32, 0x2e, + 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x6b, 0x75, 0x69, 0x74, + 0x79, 0x2e, 0x6b, 0x61, 0x72, 0x67, 0x6f, 0x2e, 0x70, 0x6b, 0x67, 0x2e, 0x61, 0x70, 0x69, 0x2e, + 0x6d, 0x65, 0x74, 0x61, 0x76, 0x31, 0x2e, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x4d, 0x65, 0x74, + 0x61, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x4b, 0x0a, 0x04, 0x73, + 0x70, 0x65, 0x63, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x37, 0x2e, 0x67, 0x69, 0x74, 0x68, + 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x6b, 0x75, 0x69, 0x74, 0x79, 0x2e, 0x6b, 0x61, + 0x72, 0x67, 0x6f, 0x2e, 0x70, 0x6b, 0x67, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x61, 0x6c, + 0x70, 0x68, 0x61, 0x31, 0x2e, 0x57, 0x61, 0x72, 0x65, 0x68, 0x6f, 0x75, 0x73, 0x65, 0x53, 0x70, + 0x65, 0x63, 0x52, 0x04, 0x73, 0x70, 0x65, 0x63, 0x12, 0x51, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, + 0x75, 0x73, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x39, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, + 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x6b, 0x75, 0x69, 0x74, 0x79, 0x2e, 0x6b, 0x61, 0x72, + 
0x67, 0x6f, 0x2e, 0x70, 0x6b, 0x67, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, + 0x68, 0x61, 0x31, 0x2e, 0x57, 0x61, 0x72, 0x65, 0x68, 0x6f, 0x75, 0x73, 0x65, 0x53, 0x74, 0x61, + 0x74, 0x75, 0x73, 0x52, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x22, 0x71, 0x0a, 0x0d, 0x57, + 0x61, 0x72, 0x65, 0x68, 0x6f, 0x75, 0x73, 0x65, 0x53, 0x70, 0x65, 0x63, 0x12, 0x60, 0x0a, 0x0d, + 0x73, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x01, 0x20, + 0x03, 0x28, 0x0b, 0x32, 0x3a, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, + 0x2e, 0x61, 0x6b, 0x75, 0x69, 0x74, 0x79, 0x2e, 0x6b, 0x61, 0x72, 0x67, 0x6f, 0x2e, 0x70, 0x6b, + 0x67, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x52, + 0x65, 0x70, 0x6f, 0x53, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x52, + 0x0d, 0x73, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x22, 0x58, + 0x0a, 0x0f, 0x57, 0x61, 0x72, 0x65, 0x68, 0x6f, 0x75, 0x73, 0x65, 0x53, 0x74, 0x61, 0x74, 0x75, + 0x73, 0x12, 0x14, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x2f, 0x0a, 0x13, 0x6f, 0x62, 0x73, 0x65, 0x72, + 0x76, 0x65, 0x64, 0x5f, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x03, 0x52, 0x12, 0x6f, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x64, 0x47, 0x65, + 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0xe7, 0x02, 0x0a, 0x0c, 0x56, 0x65, 0x72, + 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x72, 0x0a, 0x12, 0x61, 0x6e, 0x61, + 0x6c, 0x79, 0x73, 0x69, 0x73, 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x18, + 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x43, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, + 0x6f, 0x6d, 0x2e, 0x61, 0x6b, 0x75, 0x69, 0x74, 0x79, 0x2e, 0x6b, 0x61, 0x72, 0x67, 0x6f, 0x2e, + 0x70, 0x6b, 0x67, 0x2e, 0x61, 0x70, 
0x69, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, + 0x2e, 0x41, 0x6e, 0x61, 0x6c, 0x79, 0x73, 0x69, 0x73, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, + 0x65, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x52, 0x11, 0x61, 0x6e, 0x61, 0x6c, + 0x79, 0x73, 0x69, 0x73, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x12, 0x76, 0x0a, + 0x15, 0x61, 0x6e, 0x61, 0x6c, 0x79, 0x73, 0x69, 0x73, 0x5f, 0x72, 0x75, 0x6e, 0x5f, 0x6d, 0x65, + 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x3d, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x6b, 0x75, 0x69, 0x74, 0x79, 0x2e, 0x6b, 0x61, 0x72, 0x67, 0x6f, 0x2e, 0x70, 0x6b, 0x67, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, - 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x57, 0x61, 0x72, 0x65, 0x68, 0x6f, 0x75, 0x73, - 0x65, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x22, - 0x71, 0x0a, 0x0d, 0x57, 0x61, 0x72, 0x65, 0x68, 0x6f, 0x75, 0x73, 0x65, 0x53, 0x70, 0x65, 0x63, - 0x12, 0x60, 0x0a, 0x0d, 0x73, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, - 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x3a, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, - 0x2e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x6b, 0x75, 0x69, 0x74, 0x79, 0x2e, 0x6b, 0x61, 0x72, 0x67, - 0x6f, 0x2e, 0x70, 0x6b, 0x67, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, - 0x61, 0x31, 0x2e, 0x52, 0x65, 0x70, 0x6f, 0x53, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, - 0x69, 0x6f, 0x6e, 0x52, 0x0d, 0x73, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, - 0x6e, 0x73, 0x22, 0x58, 0x0a, 0x0f, 0x57, 0x61, 0x72, 0x65, 0x68, 0x6f, 0x75, 0x73, 0x65, 0x53, - 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x14, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x2f, 0x0a, 0x13, 0x6f, - 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x64, 0x5f, 0x67, 0x65, 0x6e, 0x65, 0x72, 
0x61, 0x74, 0x69, - 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, 0x52, 0x12, 0x6f, 0x62, 0x73, 0x65, 0x72, 0x76, - 0x65, 0x64, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0xad, 0x02, 0x0a, - 0x2c, 0x63, 0x6f, 0x6d, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2e, - 0x61, 0x6b, 0x75, 0x69, 0x74, 0x79, 0x2e, 0x6b, 0x61, 0x72, 0x67, 0x6f, 0x2e, 0x70, 0x6b, 0x67, - 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x42, 0x0a, 0x54, - 0x79, 0x70, 0x65, 0x73, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x28, 0x67, 0x69, 0x74, - 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x61, 0x6b, 0x75, 0x69, 0x74, 0x79, 0x2f, 0x6b, - 0x61, 0x72, 0x67, 0x6f, 0x2f, 0x70, 0x6b, 0x67, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, 0x61, - 0x6c, 0x70, 0x68, 0x61, 0x31, 0xa2, 0x02, 0x06, 0x47, 0x43, 0x41, 0x4b, 0x50, 0x41, 0xaa, 0x02, - 0x28, 0x47, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x43, 0x6f, 0x6d, 0x2e, 0x41, 0x6b, 0x75, 0x69, - 0x74, 0x79, 0x2e, 0x4b, 0x61, 0x72, 0x67, 0x6f, 0x2e, 0x50, 0x6b, 0x67, 0x2e, 0x41, 0x70, 0x69, - 0x2e, 0x56, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0xca, 0x02, 0x28, 0x47, 0x69, 0x74, 0x68, - 0x75, 0x62, 0x5c, 0x43, 0x6f, 0x6d, 0x5c, 0x41, 0x6b, 0x75, 0x69, 0x74, 0x79, 0x5c, 0x4b, 0x61, - 0x72, 0x67, 0x6f, 0x5c, 0x50, 0x6b, 0x67, 0x5c, 0x41, 0x70, 0x69, 0x5c, 0x56, 0x31, 0x61, 0x6c, - 0x70, 0x68, 0x61, 0x31, 0xe2, 0x02, 0x34, 0x47, 0x69, 0x74, 0x68, 0x75, 0x62, 0x5c, 0x43, 0x6f, - 0x6d, 0x5c, 0x41, 0x6b, 0x75, 0x69, 0x74, 0x79, 0x5c, 0x4b, 0x61, 0x72, 0x67, 0x6f, 0x5c, 0x50, - 0x6b, 0x67, 0x5c, 0x41, 0x70, 0x69, 0x5c, 0x56, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x5c, - 0x47, 0x50, 0x42, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0xea, 0x02, 0x2e, 0x47, 0x69, - 0x74, 0x68, 0x75, 0x62, 0x3a, 0x3a, 0x43, 0x6f, 0x6d, 0x3a, 0x3a, 0x41, 0x6b, 0x75, 0x69, 0x74, - 0x79, 0x3a, 0x3a, 0x4b, 0x61, 0x72, 0x67, 0x6f, 0x3a, 0x3a, 0x50, 0x6b, 0x67, 0x3a, 0x3a, 0x41, - 0x70, 0x69, 0x3a, 
0x3a, 0x56, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x62, 0x06, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x33, + 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x41, 0x6e, 0x61, 0x6c, 0x79, 0x73, 0x69, 0x73, + 0x52, 0x75, 0x6e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x48, 0x00, 0x52, 0x13, 0x61, + 0x6e, 0x61, 0x6c, 0x79, 0x73, 0x69, 0x73, 0x52, 0x75, 0x6e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, + 0x74, 0x61, 0x88, 0x01, 0x01, 0x12, 0x51, 0x0a, 0x04, 0x61, 0x72, 0x67, 0x73, 0x18, 0x03, 0x20, + 0x03, 0x28, 0x0b, 0x32, 0x3d, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, + 0x2e, 0x61, 0x6b, 0x75, 0x69, 0x74, 0x79, 0x2e, 0x6b, 0x61, 0x72, 0x67, 0x6f, 0x2e, 0x70, 0x6b, + 0x67, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x41, + 0x6e, 0x61, 0x6c, 0x79, 0x73, 0x69, 0x73, 0x52, 0x75, 0x6e, 0x41, 0x72, 0x67, 0x75, 0x6d, 0x65, + 0x6e, 0x74, 0x52, 0x04, 0x61, 0x72, 0x67, 0x73, 0x42, 0x18, 0x0a, 0x16, 0x5f, 0x61, 0x6e, 0x61, + 0x6c, 0x79, 0x73, 0x69, 0x73, 0x5f, 0x72, 0x75, 0x6e, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, + 0x74, 0x61, 0x22, 0x2f, 0x0a, 0x19, 0x41, 0x6e, 0x61, 0x6c, 0x79, 0x73, 0x69, 0x73, 0x54, 0x65, + 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x12, + 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, + 0x61, 0x6d, 0x65, 0x22, 0xe5, 0x02, 0x0a, 0x13, 0x41, 0x6e, 0x61, 0x6c, 0x79, 0x73, 0x69, 0x73, + 0x52, 0x75, 0x6e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x61, 0x0a, 0x06, 0x6c, + 0x61, 0x62, 0x65, 0x6c, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x49, 0x2e, 0x67, 0x69, + 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x6b, 0x75, 0x69, 0x74, 0x79, 0x2e, + 0x6b, 0x61, 0x72, 0x67, 0x6f, 0x2e, 0x70, 0x6b, 0x67, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, + 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x41, 0x6e, 0x61, 0x6c, 0x79, 0x73, 0x69, 0x73, 0x52, + 0x75, 0x6e, 0x4d, 0x65, 0x74, 
0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x4c, 0x61, 0x62, 0x65, 0x6c, + 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x12, 0x70, + 0x0a, 0x0b, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x02, 0x20, + 0x03, 0x28, 0x0b, 0x32, 0x4e, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, + 0x2e, 0x61, 0x6b, 0x75, 0x69, 0x74, 0x79, 0x2e, 0x6b, 0x61, 0x72, 0x67, 0x6f, 0x2e, 0x70, 0x6b, + 0x67, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x41, + 0x6e, 0x61, 0x6c, 0x79, 0x73, 0x69, 0x73, 0x52, 0x75, 0x6e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, + 0x74, 0x61, 0x2e, 0x41, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, + 0x74, 0x72, 0x79, 0x52, 0x0b, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, + 0x1a, 0x39, 0x0a, 0x0b, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, + 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, + 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x3e, 0x0a, 0x10, 0x41, + 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, + 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, + 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x3f, 0x0a, 0x13, 0x41, + 0x6e, 0x61, 0x6c, 0x79, 0x73, 0x69, 0x73, 0x52, 0x75, 0x6e, 0x41, 0x72, 0x67, 0x75, 0x6d, 0x65, + 0x6e, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 
0x65, 0x22, 0x75, 0x0a, 0x10, + 0x56, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x6e, 0x66, 0x6f, + 0x12, 0x61, 0x0a, 0x0c, 0x61, 0x6e, 0x61, 0x6c, 0x79, 0x73, 0x69, 0x73, 0x5f, 0x72, 0x75, 0x6e, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x3e, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, + 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x6b, 0x75, 0x69, 0x74, 0x79, 0x2e, 0x6b, 0x61, 0x72, 0x67, 0x6f, + 0x2e, 0x70, 0x6b, 0x67, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, + 0x31, 0x2e, 0x41, 0x6e, 0x61, 0x6c, 0x79, 0x73, 0x69, 0x73, 0x52, 0x75, 0x6e, 0x52, 0x65, 0x66, + 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x52, 0x0b, 0x61, 0x6e, 0x61, 0x6c, 0x79, 0x73, 0x69, 0x73, + 0x52, 0x75, 0x6e, 0x22, 0x5e, 0x0a, 0x14, 0x41, 0x6e, 0x61, 0x6c, 0x79, 0x73, 0x69, 0x73, 0x52, + 0x75, 0x6e, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x12, 0x1c, 0x0a, 0x09, 0x6e, + 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, + 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, + 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, + 0x05, 0x70, 0x68, 0x61, 0x73, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x70, 0x68, + 0x61, 0x73, 0x65, 0x42, 0xad, 0x02, 0x0a, 0x2c, 0x63, 0x6f, 0x6d, 0x2e, 0x67, 0x69, 0x74, 0x68, + 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x6b, 0x75, 0x69, 0x74, 0x79, 0x2e, 0x6b, 0x61, + 0x72, 0x67, 0x6f, 0x2e, 0x70, 0x6b, 0x67, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x61, 0x6c, + 0x70, 0x68, 0x61, 0x31, 0x42, 0x0a, 0x54, 0x79, 0x70, 0x65, 0x73, 0x50, 0x72, 0x6f, 0x74, 0x6f, + 0x50, 0x01, 0x5a, 0x28, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x61, + 0x6b, 0x75, 0x69, 0x74, 0x79, 0x2f, 0x6b, 0x61, 0x72, 0x67, 0x6f, 0x2f, 0x70, 0x6b, 0x67, 0x2f, + 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0xa2, 0x02, 0x06, 0x47, + 0x43, 
0x41, 0x4b, 0x50, 0x41, 0xaa, 0x02, 0x28, 0x47, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x43, + 0x6f, 0x6d, 0x2e, 0x41, 0x6b, 0x75, 0x69, 0x74, 0x79, 0x2e, 0x4b, 0x61, 0x72, 0x67, 0x6f, 0x2e, + 0x50, 0x6b, 0x67, 0x2e, 0x41, 0x70, 0x69, 0x2e, 0x56, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, + 0xca, 0x02, 0x28, 0x47, 0x69, 0x74, 0x68, 0x75, 0x62, 0x5c, 0x43, 0x6f, 0x6d, 0x5c, 0x41, 0x6b, + 0x75, 0x69, 0x74, 0x79, 0x5c, 0x4b, 0x61, 0x72, 0x67, 0x6f, 0x5c, 0x50, 0x6b, 0x67, 0x5c, 0x41, + 0x70, 0x69, 0x5c, 0x56, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0xe2, 0x02, 0x34, 0x47, 0x69, + 0x74, 0x68, 0x75, 0x62, 0x5c, 0x43, 0x6f, 0x6d, 0x5c, 0x41, 0x6b, 0x75, 0x69, 0x74, 0x79, 0x5c, + 0x4b, 0x61, 0x72, 0x67, 0x6f, 0x5c, 0x50, 0x6b, 0x67, 0x5c, 0x41, 0x70, 0x69, 0x5c, 0x56, 0x31, + 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x5c, 0x47, 0x50, 0x42, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, + 0x74, 0x61, 0xea, 0x02, 0x2e, 0x47, 0x69, 0x74, 0x68, 0x75, 0x62, 0x3a, 0x3a, 0x43, 0x6f, 0x6d, + 0x3a, 0x3a, 0x41, 0x6b, 0x75, 0x69, 0x74, 0x79, 0x3a, 0x3a, 0x4b, 0x61, 0x72, 0x67, 0x6f, 0x3a, + 0x3a, 0x50, 0x6b, 0x67, 0x3a, 0x3a, 0x41, 0x70, 0x69, 0x3a, 0x3a, 0x56, 0x31, 0x61, 0x6c, 0x70, + 0x68, 0x61, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( @@ -3461,7 +3897,7 @@ func file_v1alpha1_types_proto_rawDescGZIP() []byte { return file_v1alpha1_types_proto_rawDescData } -var file_v1alpha1_types_proto_msgTypes = make([]protoimpl.MessageInfo, 48) +var file_v1alpha1_types_proto_msgTypes = make([]protoimpl.MessageInfo, 56) var file_v1alpha1_types_proto_goTypes = []interface{}{ (*ArgoCDAppUpdate)(nil), // 0: github.com.akuity.kargo.pkg.api.v1alpha1.ArgoCDAppUpdate (*ArgoCDHelm)(nil), // 1: github.com.akuity.kargo.pkg.api.v1alpha1.ArgoCDHelm @@ -3509,11 +3945,19 @@ var file_v1alpha1_types_proto_goTypes = []interface{}{ (*Warehouse)(nil), // 43: github.com.akuity.kargo.pkg.api.v1alpha1.Warehouse (*WarehouseSpec)(nil), // 44: github.com.akuity.kargo.pkg.api.v1alpha1.WarehouseSpec 
(*WarehouseStatus)(nil), // 45: github.com.akuity.kargo.pkg.api.v1alpha1.WarehouseStatus - nil, // 46: github.com.akuity.kargo.pkg.api.v1alpha1.FreightStatus.VerifiedInEntry - nil, // 47: github.com.akuity.kargo.pkg.api.v1alpha1.FreightStatus.ApprovedForEntry - (*metav1.ObjectMeta)(nil), // 48: github.com.akuity.kargo.pkg.api.metav1.ObjectMeta - (*metav1.ListMeta)(nil), // 49: github.com.akuity.kargo.pkg.api.metav1.ListMeta - (*timestamppb.Timestamp)(nil), // 50: google.protobuf.Timestamp + (*Verification)(nil), // 46: github.com.akuity.kargo.pkg.api.v1alpha1.Verification + (*AnalysisTemplateReference)(nil), // 47: github.com.akuity.kargo.pkg.api.v1alpha1.AnalysisTemplateReference + (*AnalysisRunMetadata)(nil), // 48: github.com.akuity.kargo.pkg.api.v1alpha1.AnalysisRunMetadata + (*AnalysisRunArgument)(nil), // 49: github.com.akuity.kargo.pkg.api.v1alpha1.AnalysisRunArgument + (*VerificationInfo)(nil), // 50: github.com.akuity.kargo.pkg.api.v1alpha1.VerificationInfo + (*AnalysisRunReference)(nil), // 51: github.com.akuity.kargo.pkg.api.v1alpha1.AnalysisRunReference + nil, // 52: github.com.akuity.kargo.pkg.api.v1alpha1.FreightStatus.VerifiedInEntry + nil, // 53: github.com.akuity.kargo.pkg.api.v1alpha1.FreightStatus.ApprovedForEntry + nil, // 54: github.com.akuity.kargo.pkg.api.v1alpha1.AnalysisRunMetadata.LabelsEntry + nil, // 55: github.com.akuity.kargo.pkg.api.v1alpha1.AnalysisRunMetadata.AnnotationsEntry + (*metav1.ObjectMeta)(nil), // 56: github.com.akuity.kargo.pkg.api.metav1.ObjectMeta + (*metav1.ListMeta)(nil), // 57: github.com.akuity.kargo.pkg.api.metav1.ListMeta + (*timestamppb.Timestamp)(nil), // 58: google.protobuf.Timestamp } var file_v1alpha1_types_proto_depIdxs = []int32{ 5, // 0: github.com.akuity.kargo.pkg.api.v1alpha1.ArgoCDAppUpdate.source_updates:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.ArgoCDSourceUpdate @@ -3530,54 +3974,62 @@ var file_v1alpha1_types_proto_depIdxs = []int32{ 17, // 11: 
github.com.akuity.kargo.pkg.api.v1alpha1.HelmPromotionMechanism.images:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.HelmImageUpdate 16, // 12: github.com.akuity.kargo.pkg.api.v1alpha1.HelmPromotionMechanism.charts:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.HelmChartDependencyUpdate 21, // 13: github.com.akuity.kargo.pkg.api.v1alpha1.KustomizePromotionMechanism.images:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.KustomizeImageUpdate - 48, // 14: github.com.akuity.kargo.pkg.api.v1alpha1.Promotion.metadata:type_name -> github.com.akuity.kargo.pkg.api.metav1.ObjectMeta + 56, // 14: github.com.akuity.kargo.pkg.api.v1alpha1.Promotion.metadata:type_name -> github.com.akuity.kargo.pkg.api.metav1.ObjectMeta 29, // 15: github.com.akuity.kargo.pkg.api.v1alpha1.Promotion.spec:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.PromotionSpec 30, // 16: github.com.akuity.kargo.pkg.api.v1alpha1.Promotion.status:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.PromotionStatus 39, // 17: github.com.akuity.kargo.pkg.api.v1alpha1.PromotionInfo.freight:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.SimpleFreight - 49, // 18: github.com.akuity.kargo.pkg.api.v1alpha1.PromotionList.metadata:type_name -> github.com.akuity.kargo.pkg.api.metav1.ListMeta + 57, // 18: github.com.akuity.kargo.pkg.api.v1alpha1.PromotionList.metadata:type_name -> github.com.akuity.kargo.pkg.api.metav1.ListMeta 23, // 19: github.com.akuity.kargo.pkg.api.v1alpha1.PromotionList.items:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.Promotion 10, // 20: github.com.akuity.kargo.pkg.api.v1alpha1.PromotionMechanisms.git_repo_updates:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.GitRepoUpdate 0, // 21: github.com.akuity.kargo.pkg.api.v1alpha1.PromotionMechanisms.argocd_app_updates:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.ArgoCDAppUpdate - 48, // 22: github.com.akuity.kargo.pkg.api.v1alpha1.PromotionPolicy.metadata:type_name -> 
github.com.akuity.kargo.pkg.api.metav1.ObjectMeta - 49, // 23: github.com.akuity.kargo.pkg.api.v1alpha1.PromotionPolicyList.metadata:type_name -> github.com.akuity.kargo.pkg.api.metav1.ListMeta + 56, // 22: github.com.akuity.kargo.pkg.api.v1alpha1.PromotionPolicy.metadata:type_name -> github.com.akuity.kargo.pkg.api.metav1.ObjectMeta + 57, // 23: github.com.akuity.kargo.pkg.api.v1alpha1.PromotionPolicyList.metadata:type_name -> github.com.akuity.kargo.pkg.api.metav1.ListMeta 27, // 24: github.com.akuity.kargo.pkg.api.v1alpha1.PromotionPolicyList.items:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.PromotionPolicy 11, // 25: github.com.akuity.kargo.pkg.api.v1alpha1.RepoSubscription.git:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.GitSubscription 20, // 26: github.com.akuity.kargo.pkg.api.v1alpha1.RepoSubscription.image:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.ImageSubscription 8, // 27: github.com.akuity.kargo.pkg.api.v1alpha1.RepoSubscription.chart:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.ChartSubscription - 48, // 28: github.com.akuity.kargo.pkg.api.v1alpha1.Stage.metadata:type_name -> github.com.akuity.kargo.pkg.api.metav1.ObjectMeta + 56, // 28: github.com.akuity.kargo.pkg.api.v1alpha1.Stage.metadata:type_name -> github.com.akuity.kargo.pkg.api.metav1.ObjectMeta 34, // 29: github.com.akuity.kargo.pkg.api.v1alpha1.Stage.spec:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.StageSpec 40, // 30: github.com.akuity.kargo.pkg.api.v1alpha1.Stage.status:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.StageStatus - 49, // 31: github.com.akuity.kargo.pkg.api.v1alpha1.StageList.metadata:type_name -> github.com.akuity.kargo.pkg.api.metav1.ListMeta + 57, // 31: github.com.akuity.kargo.pkg.api.v1alpha1.StageList.metadata:type_name -> github.com.akuity.kargo.pkg.api.metav1.ListMeta 32, // 32: github.com.akuity.kargo.pkg.api.v1alpha1.StageList.items:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.Stage 42, // 33: 
github.com.akuity.kargo.pkg.api.v1alpha1.StageSpec.subscriptions:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.Subscriptions 26, // 34: github.com.akuity.kargo.pkg.api.v1alpha1.StageSpec.promotion_mechanisms:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.PromotionMechanisms - 48, // 35: github.com.akuity.kargo.pkg.api.v1alpha1.Freight.metadata:type_name -> github.com.akuity.kargo.pkg.api.metav1.ObjectMeta - 9, // 36: github.com.akuity.kargo.pkg.api.v1alpha1.Freight.commits:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.GitCommit - 19, // 37: github.com.akuity.kargo.pkg.api.v1alpha1.Freight.images:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.Image - 7, // 38: github.com.akuity.kargo.pkg.api.v1alpha1.Freight.charts:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.Chart - 36, // 39: github.com.akuity.kargo.pkg.api.v1alpha1.Freight.status:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.FreightStatus - 46, // 40: github.com.akuity.kargo.pkg.api.v1alpha1.FreightStatus.verified_in:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.FreightStatus.VerifiedInEntry - 47, // 41: github.com.akuity.kargo.pkg.api.v1alpha1.FreightStatus.approved_for:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.FreightStatus.ApprovedForEntry - 50, // 42: github.com.akuity.kargo.pkg.api.v1alpha1.SimpleFreight.first_seen:type_name -> google.protobuf.Timestamp - 9, // 43: github.com.akuity.kargo.pkg.api.v1alpha1.SimpleFreight.commits:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.GitCommit - 19, // 44: github.com.akuity.kargo.pkg.api.v1alpha1.SimpleFreight.images:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.Image - 7, // 45: github.com.akuity.kargo.pkg.api.v1alpha1.SimpleFreight.charts:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.Chart - 39, // 46: github.com.akuity.kargo.pkg.api.v1alpha1.StageStatus.current_freight:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.SimpleFreight - 39, // 47: 
github.com.akuity.kargo.pkg.api.v1alpha1.StageStatus.history:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.SimpleFreight - 12, // 48: github.com.akuity.kargo.pkg.api.v1alpha1.StageStatus.health:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.Health - 24, // 49: github.com.akuity.kargo.pkg.api.v1alpha1.StageStatus.current_promotion:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.PromotionInfo - 41, // 50: github.com.akuity.kargo.pkg.api.v1alpha1.Subscriptions.upstream_stages:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.StageSubscription - 48, // 51: github.com.akuity.kargo.pkg.api.v1alpha1.Warehouse.metadata:type_name -> github.com.akuity.kargo.pkg.api.metav1.ObjectMeta - 44, // 52: github.com.akuity.kargo.pkg.api.v1alpha1.Warehouse.spec:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.WarehouseSpec - 45, // 53: github.com.akuity.kargo.pkg.api.v1alpha1.Warehouse.status:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.WarehouseStatus - 31, // 54: github.com.akuity.kargo.pkg.api.v1alpha1.WarehouseSpec.subscriptions:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.RepoSubscription - 37, // 55: github.com.akuity.kargo.pkg.api.v1alpha1.FreightStatus.VerifiedInEntry.value:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.VerifiedStage - 38, // 56: github.com.akuity.kargo.pkg.api.v1alpha1.FreightStatus.ApprovedForEntry.value:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.ApprovedStage - 57, // [57:57] is the sub-list for method output_type - 57, // [57:57] is the sub-list for method input_type - 57, // [57:57] is the sub-list for extension type_name - 57, // [57:57] is the sub-list for extension extendee - 0, // [0:57] is the sub-list for field type_name + 46, // 35: github.com.akuity.kargo.pkg.api.v1alpha1.StageSpec.verification:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.Verification + 56, // 36: github.com.akuity.kargo.pkg.api.v1alpha1.Freight.metadata:type_name -> 
github.com.akuity.kargo.pkg.api.metav1.ObjectMeta + 9, // 37: github.com.akuity.kargo.pkg.api.v1alpha1.Freight.commits:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.GitCommit + 19, // 38: github.com.akuity.kargo.pkg.api.v1alpha1.Freight.images:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.Image + 7, // 39: github.com.akuity.kargo.pkg.api.v1alpha1.Freight.charts:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.Chart + 36, // 40: github.com.akuity.kargo.pkg.api.v1alpha1.Freight.status:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.FreightStatus + 52, // 41: github.com.akuity.kargo.pkg.api.v1alpha1.FreightStatus.verified_in:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.FreightStatus.VerifiedInEntry + 53, // 42: github.com.akuity.kargo.pkg.api.v1alpha1.FreightStatus.approved_for:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.FreightStatus.ApprovedForEntry + 58, // 43: github.com.akuity.kargo.pkg.api.v1alpha1.SimpleFreight.first_seen:type_name -> google.protobuf.Timestamp + 9, // 44: github.com.akuity.kargo.pkg.api.v1alpha1.SimpleFreight.commits:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.GitCommit + 19, // 45: github.com.akuity.kargo.pkg.api.v1alpha1.SimpleFreight.images:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.Image + 7, // 46: github.com.akuity.kargo.pkg.api.v1alpha1.SimpleFreight.charts:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.Chart + 50, // 47: github.com.akuity.kargo.pkg.api.v1alpha1.SimpleFreight.verification_info:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.VerificationInfo + 39, // 48: github.com.akuity.kargo.pkg.api.v1alpha1.StageStatus.current_freight:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.SimpleFreight + 39, // 49: github.com.akuity.kargo.pkg.api.v1alpha1.StageStatus.history:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.SimpleFreight + 12, // 50: github.com.akuity.kargo.pkg.api.v1alpha1.StageStatus.health:type_name -> 
github.com.akuity.kargo.pkg.api.v1alpha1.Health + 24, // 51: github.com.akuity.kargo.pkg.api.v1alpha1.StageStatus.current_promotion:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.PromotionInfo + 41, // 52: github.com.akuity.kargo.pkg.api.v1alpha1.Subscriptions.upstream_stages:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.StageSubscription + 56, // 53: github.com.akuity.kargo.pkg.api.v1alpha1.Warehouse.metadata:type_name -> github.com.akuity.kargo.pkg.api.metav1.ObjectMeta + 44, // 54: github.com.akuity.kargo.pkg.api.v1alpha1.Warehouse.spec:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.WarehouseSpec + 45, // 55: github.com.akuity.kargo.pkg.api.v1alpha1.Warehouse.status:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.WarehouseStatus + 31, // 56: github.com.akuity.kargo.pkg.api.v1alpha1.WarehouseSpec.subscriptions:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.RepoSubscription + 47, // 57: github.com.akuity.kargo.pkg.api.v1alpha1.Verification.analysis_templates:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.AnalysisTemplateReference + 48, // 58: github.com.akuity.kargo.pkg.api.v1alpha1.Verification.analysis_run_metadata:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.AnalysisRunMetadata + 49, // 59: github.com.akuity.kargo.pkg.api.v1alpha1.Verification.args:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.AnalysisRunArgument + 54, // 60: github.com.akuity.kargo.pkg.api.v1alpha1.AnalysisRunMetadata.labels:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.AnalysisRunMetadata.LabelsEntry + 55, // 61: github.com.akuity.kargo.pkg.api.v1alpha1.AnalysisRunMetadata.annotations:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.AnalysisRunMetadata.AnnotationsEntry + 51, // 62: github.com.akuity.kargo.pkg.api.v1alpha1.VerificationInfo.analysis_run:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.AnalysisRunReference + 37, // 63: 
github.com.akuity.kargo.pkg.api.v1alpha1.FreightStatus.VerifiedInEntry.value:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.VerifiedStage + 38, // 64: github.com.akuity.kargo.pkg.api.v1alpha1.FreightStatus.ApprovedForEntry.value:type_name -> github.com.akuity.kargo.pkg.api.v1alpha1.ApprovedStage + 65, // [65:65] is the sub-list for method output_type + 65, // [65:65] is the sub-list for method input_type + 65, // [65:65] is the sub-list for extension type_name + 65, // [65:65] is the sub-list for extension extendee + 0, // [0:65] is the sub-list for field type_name } func init() { file_v1alpha1_types_proto_init() } @@ -4138,6 +4590,78 @@ func file_v1alpha1_types_proto_init() { return nil } } + file_v1alpha1_types_proto_msgTypes[46].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Verification); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_v1alpha1_types_proto_msgTypes[47].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*AnalysisTemplateReference); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_v1alpha1_types_proto_msgTypes[48].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*AnalysisRunMetadata); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_v1alpha1_types_proto_msgTypes[49].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*AnalysisRunArgument); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_v1alpha1_types_proto_msgTypes[50].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*VerificationInfo); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + 
default: + return nil + } + } + file_v1alpha1_types_proto_msgTypes[51].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*AnalysisRunReference); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } } file_v1alpha1_types_proto_msgTypes[0].OneofWrappers = []interface{}{} file_v1alpha1_types_proto_msgTypes[5].OneofWrappers = []interface{}{} @@ -4148,15 +4672,17 @@ func file_v1alpha1_types_proto_init() { file_v1alpha1_types_proto_msgTypes[23].OneofWrappers = []interface{}{} file_v1alpha1_types_proto_msgTypes[25].OneofWrappers = []interface{}{} file_v1alpha1_types_proto_msgTypes[31].OneofWrappers = []interface{}{} + file_v1alpha1_types_proto_msgTypes[34].OneofWrappers = []interface{}{} file_v1alpha1_types_proto_msgTypes[39].OneofWrappers = []interface{}{} file_v1alpha1_types_proto_msgTypes[40].OneofWrappers = []interface{}{} + file_v1alpha1_types_proto_msgTypes[46].OneofWrappers = []interface{}{} type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_v1alpha1_types_proto_rawDesc, NumEnums: 0, - NumMessages: 48, + NumMessages: 56, NumExtensions: 0, NumServices: 0, }, diff --git a/ui/src/gen/schema/stages.kargo.akuity.io_v1alpha1.json b/ui/src/gen/schema/stages.kargo.akuity.io_v1alpha1.json index ebfa35113..9fa06e02b 100644 --- a/ui/src/gen/schema/stages.kargo.akuity.io_v1alpha1.json +++ b/ui/src/gen/schema/stages.kargo.akuity.io_v1alpha1.json @@ -356,6 +356,70 @@ } }, "type": "object" + }, + "verification": { + "description": "Verification describes how to verify a Stage's current Freight is fit for promotion downstream.", + "properties": { + "analysisRunMetadata": { + "description": "AnalysisRunMetadata is contains optional metadata that should be applied to all AnalysisRuns.", + "properties": { + "annotations": { + "additionalProperties": { + "type": "string" + }, + "description": 
"Additional annotations to apply to an AnalysisRun.", + "type": "object" + }, + "labels": { + "additionalProperties": { + "type": "string" + }, + "description": "Additional labels to apply to an AnalysisRun.", + "type": "object" + } + }, + "type": "object" + }, + "analysisTemplates": { + "description": "AnalysisTemplates is a list of AnalysisTemplates from which AnalysisRuns should be created to verify a Stage's current Freight is fit to be promoted downstream.", + "items": { + "description": "AnalysisTemplateReference is a reference to an AnalysisTemplate.", + "properties": { + "name": { + "description": "Name is the name of the AnalysisTemplate in the same project/namespace as the Stage.", + "type": "string" + } + }, + "required": [ + "name" + ], + "type": "object" + }, + "type": "array" + }, + "args": { + "description": "Args lists arguments that should be added to all AnalysisRuns.", + "items": { + "description": "AnalysisRunArgument represents an argument to be added to an AnalysisRun.", + "properties": { + "name": { + "description": "Name is the name of the argument.", + "type": "string" + }, + "value": { + "description": "Value is the value of the argument.", + "type": "string" + } + }, + "required": [ + "name" + ], + "type": "object" + }, + "type": "array" + } + }, + "type": "object" } }, "required": [ @@ -454,6 +518,38 @@ "type": "object" }, "type": "array" + }, + "verificationResult": { + "description": "VerificationInfo is information about any verification process that was associated with this Freight for this Stage.", + "properties": { + "analysisRun": { + "description": "AnalysisRunReference is a reference to an AnalysisRun.", + "properties": { + "name": { + "description": "Name is the name of the AnalysisRun.", + "type": "string" + }, + "namespace": { + "description": "Namespace is the namespace of the AnalysisRun.", + "type": "string" + }, + "phase": { + "description": "Phase is the last observed phase of the AnalysisRun referenced by Name.", + 
"type": "string" + } + }, + "required": [ + "name", + "namespace", + "phase" + ], + "type": "object" + } + }, + "required": [ + "analysisRun" + ], + "type": "object" } }, "type": "object" @@ -549,6 +645,38 @@ "type": "object" }, "type": "array" + }, + "verificationResult": { + "description": "VerificationInfo is information about any verification process that was associated with this Freight for this Stage.", + "properties": { + "analysisRun": { + "description": "AnalysisRunReference is a reference to an AnalysisRun.", + "properties": { + "name": { + "description": "Name is the name of the AnalysisRun.", + "type": "string" + }, + "namespace": { + "description": "Namespace is the namespace of the AnalysisRun.", + "type": "string" + }, + "phase": { + "description": "Phase is the last observed phase of the AnalysisRun referenced by Name.", + "type": "string" + } + }, + "required": [ + "name", + "namespace", + "phase" + ], + "type": "object" + } + }, + "required": [ + "analysisRun" + ], + "type": "object" } }, "type": "object" @@ -733,6 +861,38 @@ "type": "object" }, "type": "array" + }, + "verificationResult": { + "description": "VerificationInfo is information about any verification process that was associated with this Freight for this Stage.", + "properties": { + "analysisRun": { + "description": "AnalysisRunReference is a reference to an AnalysisRun.", + "properties": { + "name": { + "description": "Name is the name of the AnalysisRun.", + "type": "string" + }, + "namespace": { + "description": "Namespace is the namespace of the AnalysisRun.", + "type": "string" + }, + "phase": { + "description": "Phase is the last observed phase of the AnalysisRun referenced by Name.", + "type": "string" + } + }, + "required": [ + "name", + "namespace", + "phase" + ], + "type": "object" + } + }, + "required": [ + "analysisRun" + ], + "type": "object" } }, "type": "object" @@ -745,6 +905,10 @@ "maximum": 9223372036854776000, "minimum": -9223372036854776000, "type": "integer" + }, 
+ "phase": { + "description": "Phase describes where the Stage currently is in its lifecycle.", + "type": "string" } }, "type": "object" diff --git a/ui/src/gen/v1alpha1/types_pb.ts b/ui/src/gen/v1alpha1/types_pb.ts index e29f25ebc..e109eaced 100644 --- a/ui/src/gen/v1alpha1/types_pb.ts +++ b/ui/src/gen/v1alpha1/types_pb.ts @@ -1681,6 +1681,11 @@ export class StageSpec extends Message { */ promotionMechanisms?: PromotionMechanisms; + /** + * @generated from field: optional github.com.akuity.kargo.pkg.api.v1alpha1.Verification verification = 3; + */ + verification?: Verification; + constructor(data?: PartialMessage) { super(); proto3.util.initPartial(data, this); @@ -1691,6 +1696,7 @@ export class StageSpec extends Message { static readonly fields: FieldList = proto3.util.newFieldList(() => [ { no: 1, name: "subscriptions", kind: "message", T: Subscriptions }, { no: 2, name: "promotion_mechanisms", kind: "message", T: PromotionMechanisms }, + { no: 3, name: "verification", kind: "message", T: Verification, opt: true }, ]); static fromBinary(bytes: Uint8Array, options?: Partial): StageSpec { @@ -1923,6 +1929,11 @@ export class SimpleFreight extends Message { */ charts: Chart[] = []; + /** + * @generated from field: optional github.com.akuity.kargo.pkg.api.v1alpha1.VerificationInfo verification_info = 7; + */ + verificationInfo?: VerificationInfo; + constructor(data?: PartialMessage) { super(); proto3.util.initPartial(data, this); @@ -1936,6 +1947,7 @@ export class SimpleFreight extends Message { { no: 4, name: "commits", kind: "message", T: GitCommit, repeated: true }, { no: 5, name: "images", kind: "message", T: Image, repeated: true }, { no: 6, name: "charts", kind: "message", T: Chart, repeated: true }, + { no: 7, name: "verification_info", kind: "message", T: VerificationInfo, opt: true }, ]); static fromBinary(bytes: Uint8Array, options?: Partial): SimpleFreight { @@ -1984,6 +1996,11 @@ export class StageStatus extends Message { */ currentPromotion?: 
PromotionInfo; + /** + * @generated from field: string phase = 7; + */ + phase = ""; + constructor(data?: PartialMessage) { super(); proto3.util.initPartial(data, this); @@ -1997,6 +2014,7 @@ export class StageStatus extends Message { { no: 4, name: "error", kind: "scalar", T: 9 /* ScalarType.STRING */ }, { no: 5, name: "health", kind: "message", T: Health, opt: true }, { no: 6, name: "current_promotion", kind: "message", T: PromotionInfo, opt: true }, + { no: 7, name: "phase", kind: "scalar", T: 9 /* ScalarType.STRING */ }, ]); static fromBinary(bytes: Uint8Array, options?: Partial): StageStatus { @@ -2237,3 +2255,261 @@ export class WarehouseStatus extends Message { } } +/** + * @generated from message github.com.akuity.kargo.pkg.api.v1alpha1.Verification + */ +export class Verification extends Message { + /** + * @generated from field: repeated github.com.akuity.kargo.pkg.api.v1alpha1.AnalysisTemplateReference analysis_templates = 1; + */ + analysisTemplates: AnalysisTemplateReference[] = []; + + /** + * @generated from field: optional github.com.akuity.kargo.pkg.api.v1alpha1.AnalysisRunMetadata analysis_run_metadata = 2; + */ + analysisRunMetadata?: AnalysisRunMetadata; + + /** + * @generated from field: repeated github.com.akuity.kargo.pkg.api.v1alpha1.AnalysisRunArgument args = 3; + */ + args: AnalysisRunArgument[] = []; + + constructor(data?: PartialMessage) { + super(); + proto3.util.initPartial(data, this); + } + + static readonly runtime: typeof proto3 = proto3; + static readonly typeName = "github.com.akuity.kargo.pkg.api.v1alpha1.Verification"; + static readonly fields: FieldList = proto3.util.newFieldList(() => [ + { no: 1, name: "analysis_templates", kind: "message", T: AnalysisTemplateReference, repeated: true }, + { no: 2, name: "analysis_run_metadata", kind: "message", T: AnalysisRunMetadata, opt: true }, + { no: 3, name: "args", kind: "message", T: AnalysisRunArgument, repeated: true }, + ]); + + static fromBinary(bytes: Uint8Array, options?: 
Partial): Verification { + return new Verification().fromBinary(bytes, options); + } + + static fromJson(jsonValue: JsonValue, options?: Partial): Verification { + return new Verification().fromJson(jsonValue, options); + } + + static fromJsonString(jsonString: string, options?: Partial): Verification { + return new Verification().fromJsonString(jsonString, options); + } + + static equals(a: Verification | PlainMessage | undefined, b: Verification | PlainMessage | undefined): boolean { + return proto3.util.equals(Verification, a, b); + } +} + +/** + * @generated from message github.com.akuity.kargo.pkg.api.v1alpha1.AnalysisTemplateReference + */ +export class AnalysisTemplateReference extends Message { + /** + * @generated from field: string name = 1; + */ + name = ""; + + constructor(data?: PartialMessage) { + super(); + proto3.util.initPartial(data, this); + } + + static readonly runtime: typeof proto3 = proto3; + static readonly typeName = "github.com.akuity.kargo.pkg.api.v1alpha1.AnalysisTemplateReference"; + static readonly fields: FieldList = proto3.util.newFieldList(() => [ + { no: 1, name: "name", kind: "scalar", T: 9 /* ScalarType.STRING */ }, + ]); + + static fromBinary(bytes: Uint8Array, options?: Partial): AnalysisTemplateReference { + return new AnalysisTemplateReference().fromBinary(bytes, options); + } + + static fromJson(jsonValue: JsonValue, options?: Partial): AnalysisTemplateReference { + return new AnalysisTemplateReference().fromJson(jsonValue, options); + } + + static fromJsonString(jsonString: string, options?: Partial): AnalysisTemplateReference { + return new AnalysisTemplateReference().fromJsonString(jsonString, options); + } + + static equals(a: AnalysisTemplateReference | PlainMessage | undefined, b: AnalysisTemplateReference | PlainMessage | undefined): boolean { + return proto3.util.equals(AnalysisTemplateReference, a, b); + } +} + +/** + * @generated from message github.com.akuity.kargo.pkg.api.v1alpha1.AnalysisRunMetadata + */ 
+export class AnalysisRunMetadata extends Message { + /** + * @generated from field: map labels = 1; + */ + labels: { [key: string]: string } = {}; + + /** + * @generated from field: map annotations = 2; + */ + annotations: { [key: string]: string } = {}; + + constructor(data?: PartialMessage) { + super(); + proto3.util.initPartial(data, this); + } + + static readonly runtime: typeof proto3 = proto3; + static readonly typeName = "github.com.akuity.kargo.pkg.api.v1alpha1.AnalysisRunMetadata"; + static readonly fields: FieldList = proto3.util.newFieldList(() => [ + { no: 1, name: "labels", kind: "map", K: 9 /* ScalarType.STRING */, V: {kind: "scalar", T: 9 /* ScalarType.STRING */} }, + { no: 2, name: "annotations", kind: "map", K: 9 /* ScalarType.STRING */, V: {kind: "scalar", T: 9 /* ScalarType.STRING */} }, + ]); + + static fromBinary(bytes: Uint8Array, options?: Partial): AnalysisRunMetadata { + return new AnalysisRunMetadata().fromBinary(bytes, options); + } + + static fromJson(jsonValue: JsonValue, options?: Partial): AnalysisRunMetadata { + return new AnalysisRunMetadata().fromJson(jsonValue, options); + } + + static fromJsonString(jsonString: string, options?: Partial): AnalysisRunMetadata { + return new AnalysisRunMetadata().fromJsonString(jsonString, options); + } + + static equals(a: AnalysisRunMetadata | PlainMessage | undefined, b: AnalysisRunMetadata | PlainMessage | undefined): boolean { + return proto3.util.equals(AnalysisRunMetadata, a, b); + } +} + +/** + * @generated from message github.com.akuity.kargo.pkg.api.v1alpha1.AnalysisRunArgument + */ +export class AnalysisRunArgument extends Message { + /** + * @generated from field: string name = 1; + */ + name = ""; + + /** + * @generated from field: string value = 2; + */ + value = ""; + + constructor(data?: PartialMessage) { + super(); + proto3.util.initPartial(data, this); + } + + static readonly runtime: typeof proto3 = proto3; + static readonly typeName = 
"github.com.akuity.kargo.pkg.api.v1alpha1.AnalysisRunArgument"; + static readonly fields: FieldList = proto3.util.newFieldList(() => [ + { no: 1, name: "name", kind: "scalar", T: 9 /* ScalarType.STRING */ }, + { no: 2, name: "value", kind: "scalar", T: 9 /* ScalarType.STRING */ }, + ]); + + static fromBinary(bytes: Uint8Array, options?: Partial): AnalysisRunArgument { + return new AnalysisRunArgument().fromBinary(bytes, options); + } + + static fromJson(jsonValue: JsonValue, options?: Partial): AnalysisRunArgument { + return new AnalysisRunArgument().fromJson(jsonValue, options); + } + + static fromJsonString(jsonString: string, options?: Partial): AnalysisRunArgument { + return new AnalysisRunArgument().fromJsonString(jsonString, options); + } + + static equals(a: AnalysisRunArgument | PlainMessage | undefined, b: AnalysisRunArgument | PlainMessage | undefined): boolean { + return proto3.util.equals(AnalysisRunArgument, a, b); + } +} + +/** + * @generated from message github.com.akuity.kargo.pkg.api.v1alpha1.VerificationInfo + */ +export class VerificationInfo extends Message { + /** + * @generated from field: github.com.akuity.kargo.pkg.api.v1alpha1.AnalysisRunReference analysis_run = 1; + */ + analysisRun?: AnalysisRunReference; + + constructor(data?: PartialMessage) { + super(); + proto3.util.initPartial(data, this); + } + + static readonly runtime: typeof proto3 = proto3; + static readonly typeName = "github.com.akuity.kargo.pkg.api.v1alpha1.VerificationInfo"; + static readonly fields: FieldList = proto3.util.newFieldList(() => [ + { no: 1, name: "analysis_run", kind: "message", T: AnalysisRunReference }, + ]); + + static fromBinary(bytes: Uint8Array, options?: Partial): VerificationInfo { + return new VerificationInfo().fromBinary(bytes, options); + } + + static fromJson(jsonValue: JsonValue, options?: Partial): VerificationInfo { + return new VerificationInfo().fromJson(jsonValue, options); + } + + static fromJsonString(jsonString: string, options?: 
Partial): VerificationInfo { + return new VerificationInfo().fromJsonString(jsonString, options); + } + + static equals(a: VerificationInfo | PlainMessage | undefined, b: VerificationInfo | PlainMessage | undefined): boolean { + return proto3.util.equals(VerificationInfo, a, b); + } +} + +/** + * @generated from message github.com.akuity.kargo.pkg.api.v1alpha1.AnalysisRunReference + */ +export class AnalysisRunReference extends Message { + /** + * @generated from field: string namespace = 1; + */ + namespace = ""; + + /** + * @generated from field: string name = 2; + */ + name = ""; + + /** + * @generated from field: string phase = 3; + */ + phase = ""; + + constructor(data?: PartialMessage) { + super(); + proto3.util.initPartial(data, this); + } + + static readonly runtime: typeof proto3 = proto3; + static readonly typeName = "github.com.akuity.kargo.pkg.api.v1alpha1.AnalysisRunReference"; + static readonly fields: FieldList = proto3.util.newFieldList(() => [ + { no: 1, name: "namespace", kind: "scalar", T: 9 /* ScalarType.STRING */ }, + { no: 2, name: "name", kind: "scalar", T: 9 /* ScalarType.STRING */ }, + { no: 3, name: "phase", kind: "scalar", T: 9 /* ScalarType.STRING */ }, + ]); + + static fromBinary(bytes: Uint8Array, options?: Partial): AnalysisRunReference { + return new AnalysisRunReference().fromBinary(bytes, options); + } + + static fromJson(jsonValue: JsonValue, options?: Partial): AnalysisRunReference { + return new AnalysisRunReference().fromJson(jsonValue, options); + } + + static fromJsonString(jsonString: string, options?: Partial): AnalysisRunReference { + return new AnalysisRunReference().fromJsonString(jsonString, options); + } + + static equals(a: AnalysisRunReference | PlainMessage | undefined, b: AnalysisRunReference | PlainMessage | undefined): boolean { + return proto3.util.equals(AnalysisRunReference, a, b); + } +} +