
Commit 931ee7c

GCP location and credentials for Kanister (#4609)
Hakan Memisoglu authored and Ilya Kislenko committed Jan 3, 2019
1 parent 6ba94b9 commit 931ee7c
Showing 21 changed files with 102 additions and 145 deletions.
@@ -34,7 +34,7 @@ actions:
then
# Setup wal-e s3 connection parameters.
timeline={{ toDate "2006-01-02T15:04:05.999999999Z07:00" .Time | date "2006-01-02T15-04-05" }}
- wale_s3_prefix="s3://{{ .Profile.Location.S3Compliant.Bucket }}/postgres-backups/{{ .Deployment.Name }}/${timeline}"
+ wale_s3_prefix="s3://{{ .Profile.Location.Bucket }}/postgres-backups/{{ .Deployment.Name }}/${timeline}"
echo "${wale_s3_prefix}" > "${env_wal_prefix}"
fi
@@ -44,8 +44,8 @@ actions:
env_wal_access_key_id="${env_dir}/AWS_ACCESS_KEY_ID"
env_wal_secret_access_key="${env_dir}/AWS_SECRET_ACCESS_KEY"
- {{- if .Profile.Location.S3Compliant.Endpoint }}
- wale_s3_endpoint="{{ .Profile.Location.S3Compliant.Endpoint | quote }}"
+ {{- if .Profile.Location.Endpoint }}
+ wale_s3_endpoint="{{ .Profile.Location.Endpoint | quote }}"
wale_s3_endpoint=${wale_s3_endpoint,,}
{{- if .Profile.SkipSSLVerify }}
# Since wal-e does not support skip-ssl-verify switch to http://
@@ -63,8 +63,8 @@ actions:
{{- else }}
# Region is required when no endpoint is used (AWS S3).
wale_s3_region="us-east-1"
- {{- if .Profile.Location.S3Compliant.Region }}
- wale_s3_region="{{ .Profile.Location.S3Compliant.Region | quote}}"
+ {{- if .Profile.Location.Region }}
+ wale_s3_region="{{ .Profile.Location.Region | quote}}"
{{- end }}
echo "${wale_s3_region}" > "${env_wal_region}"
{{- end }}
@@ -83,13 +83,13 @@ actions:
{{- if .Profile.SkipSSLVerify }}
s3_cmd+=("--no-verify-ssl")
{{- end }}
- {{- if .Profile.Location.S3Compliant.Endpoint }}
- s3_cmd+=(--endpoint "{{ .Profile.Location.S3Compliant.Endpoint }}")
+ {{- if .Profile.Location.Endpoint }}
+ s3_cmd+=(--endpoint "{{ .Profile.Location.Endpoint }}")
{{- end }}
- {{- if .Profile.Location.S3Compliant.Region }}
- s3_cmd+=(--region "{{ .Profile.Location.S3Compliant.Region | quote}}")
+ {{- if .Profile.Location.Region }}
+ s3_cmd+=(--region "{{ .Profile.Location.Region | quote}}")
{{- end }}
- s3_path="s3://{{ .Profile.Location.S3Compliant.Bucket }}/postgres-backups/{{ .Deployment.Name }}/{{ toDate "2006-01-02T15:04:05.999999999Z07:00" .Time | date "2006-01-02T15-04-05" }}/manifest.txt"
+ s3_path="s3://{{ .Profile.Location.Bucket }}/postgres-backups/{{ .Deployment.Name }}/{{ toDate "2006-01-02T15:04:05.999999999Z07:00" .Time | date "2006-01-02T15-04-05" }}/manifest.txt"
s3_cmd+=(s3 cp - "${s3_path}")
set +o xtrace
@@ -147,13 +147,13 @@ actions:
{{- if .Profile.SkipSSLVerify }}
s3_cmd+=(" --no-verify-ssl")
{{- end }}
- {{- if .Profile.Location.S3Compliant.Endpoint }}
- s3_cmd+=(--endpoint "{{ .Profile.Location.S3Compliant.Endpoint }}")
+ {{- if .Profile.Location.Endpoint }}
+ s3_cmd+=(--endpoint "{{ .Profile.Location.Endpoint }}")
{{- end }}
- {{- if .Profile.Location.S3Compliant.Region }}
- s3_cmd+=(--region "{{ .Profile.Location.S3Compliant.Region | quote}}")
+ {{- if .Profile.Location.Region }}
+ s3_cmd+=(--region "{{ .Profile.Location.Region | quote}}")
{{- end }}
- s3_path="s3://{{ .Profile.Location.S3Compliant.Bucket }}/{{ .ArtifactsIn.manifest.KeyValue.path }}"
+ s3_path="s3://{{ .Profile.Location.Bucket }}/{{ .ArtifactsIn.manifest.KeyValue.path }}"
s3_cmd+=(s3 cp "${s3_path}" -)
set +o xtrace
@@ -166,8 +166,8 @@ actions:
# Fetch base backup using the old WALE_S3_PREFIX.
# First need to setup wal-e conf as env vars
- {{- if .Profile.Location.S3Compliant.Endpoint }}
- wale_s3_endpoint="{{ .Profile.Location.S3Compliant.Endpoint | quote}}"
+ {{- if .Profile.Location.Endpoint }}
+ wale_s3_endpoint="{{ .Profile.Location.Endpoint | quote}}"
wale_s3_endpoint=${wale_s3_endpoint,,}
{{- if .Profile.SkipSSLVerify }}
# Since wal-e does not support skip-ssl-verify switch to http://
@@ -183,8 +183,8 @@ actions:
# Region will be ignored for S3 compatible object store so skipping.
{{- else }}
# Region is required when no endpoint is used (AWS S3).
- {{- if .Profile.Location.S3Compliant.Region }}
- wale_s3_region="{{ .Profile.Location.S3Compliant.Region | quote}}"
+ {{- if .Profile.Location.Region }}
+ wale_s3_region="{{ .Profile.Location.Region | quote}}"
{{- else }}
wale_s3_region="us-east-1"
{{- end }}
@@ -226,7 +226,7 @@ actions:
# Recovery is now complete and can switch to new WAL timeline
env_wal_prefix="${pgdata}/env/WALE_S3_PREFIX"
timeline={{ toDate "2006-01-02T15:04:05.999999999Z07:00" .Time | date "2006-01-02T15-04-05" }}
- wale_s3_prefix="s3://{{ .Profile.Location.S3Compliant.Bucket }}/{{ .ArtifactsIn.manifest.KeyValue.prefix }}/${timeline}"
+ wale_s3_prefix="s3://{{ .Profile.Location.Bucket }}/{{ .ArtifactsIn.manifest.KeyValue.prefix }}/${timeline}"
echo "${wale_s3_prefix}" > "${env_wal_prefix}"
- func: ScaleWorkload
name: restartPod
@@ -267,13 +267,13 @@ actions:
{{- if .Profile.SkipSSLVerify }}
aws_args+=(" --no-verify-ssl")
{{- end }}
- {{- if .Profile.Location.S3Compliant.Endpoint }}
- aws_args+=(--endpoint "{{ .Profile.Location.S3Compliant.Endpoint }}")
+ {{- if .Profile.Location.Endpoint }}
+ aws_args+=(--endpoint "{{ .Profile.Location.Endpoint }}")
{{- end }}
- {{- if .Profile.Location.S3Compliant.Region }}
- aws_args+=(--region "{{ .Profile.Location.S3Compliant.Region | quote}}")
+ {{- if .Profile.Location.Region }}
+ aws_args+=(--region "{{ .Profile.Location.Region | quote}}")
{{- end }}
- s3_path="s3://{{ .Profile.Location.S3Compliant.Bucket }}/{{ .ArtifactsIn.manifest.KeyValue.path }}"
+ s3_path="s3://{{ .Profile.Location.Bucket }}/{{ .ArtifactsIn.manifest.KeyValue.path }}"
# Get and parse artifact manifest to discover the timeline and the base-backup name.
@@ -286,8 +286,8 @@ actions:
aws "${aws_args[@]}" s3 rm --recursive "${base_backup_path}"
# Setup configuration for wal-e.
- {{- if .Profile.Location.S3Compliant.Endpoint }}
- wale_s3_endpoint="{{ .Profile.Location.S3Compliant.Endpoint | quote}}"
+ {{- if .Profile.Location.Endpoint }}
+ wale_s3_endpoint="{{ .Profile.Location.Endpoint | quote}}"
wale_s3_endpoint=${wale_s3_endpoint,,}
{{- if .Profile.SkipSSLVerify }}
# Since wal-e does not support skip-ssl-verify switch to http://
@@ -303,8 +303,8 @@ actions:
# Region will be ignored for S3 compatible object store so skipping.
{{- else }}
# Region is required when no endpoint is used (AWS S3).
- {{- if .Profile.Location.S3Compliant.Region }}
- wale_s3_region="{{ .Profile.Location.S3Compliant.Region | quote}}"
+ {{- if .Profile.Location.Region }}
+ wale_s3_region="{{ .Profile.Location.Region | quote}}"
{{- else }}
wale_s3_region="us-east-1"
{{- end }}
4 changes: 2 additions & 2 deletions examples/time-log/blueprint.yaml
@@ -9,7 +9,7 @@ actions:
outputArtifacts:
timeLog:
keyValue:
- path: '{{ .Profile.Location.S3Compliant.Bucket }}/time-log'
+ path: '{{ .Profile.Location.Bucket }}/time-log'
backupIdentifier:
keyValue:
id: '{{ toDate "2006-01-02T15:04:05.999999999Z07:00" .Time | date "2006-01-02" }}'
@@ -21,7 +21,7 @@ actions:
pod: "{{ index .Deployment.Pods 0 }}"
container: test-container
includePath: /var/log
- backupArtifactPrefix: "{{ .Profile.Location.S3Compliant.Bucket }}/time-log"
+ backupArtifactPrefix: "{{ .Profile.Location.Bucket }}/time-log"
backupIdentifier: '{{ toDate "2006-01-02T15:04:05.999999999Z07:00" .Time | date "2006-01-02" }}'
restore:
type: Deployment
9 changes: 4 additions & 5 deletions examples/time-log/s3-profile.yaml
@@ -6,11 +6,10 @@ metadata:
# Note: Add details of an existing S3 compliant bucket below
location:
type: s3Compliant
- s3Compliant:
- bucket: XXXX
- endpoint: XXXX
- prefix: XXXX
- region: XXXX
+ bucket: XXXX
+ endpoint: XXXX
+ prefix: XXXX
+ region: XXXX
credential:
type: keyPair
keyPair:
9 changes: 4 additions & 5 deletions helm/profile/templates/profile.yaml
@@ -19,11 +19,10 @@ metadata:
{{ include "profile.helmLabels" . | indent 4 }}
location:
type: s3Compliant
- s3Compliant:
- bucket: {{ required "S3 compatible bucket is required when configuring a profile." .Values.s3.bucket | quote }}
- endpoint: {{ .Values.s3.endpoint | quote }}
- prefix: {{ .Values.s3.prefix | quote }}
- region: {{ .Values.s3.region | quote }}
+ bucket: {{ required "S3 compatible bucket is required when configuring a profile." .Values.s3.bucket | quote }}
+ endpoint: {{ .Values.s3.endpoint | quote }}
+ prefix: {{ .Values.s3.prefix | quote }}
+ region: {{ .Values.s3.region | quote }}
credential:
type: keyPair
keyPair:
16 changes: 6 additions & 10 deletions pkg/apis/cr/v1alpha1/types.go
@@ -228,21 +228,17 @@ type Profile struct {
type LocationType string

const (
+ LocationTypeGCS LocationType = "gcs"
LocationTypeS3Compliant LocationType = "s3Compliant"
)

// Location
type Location struct {
- Type LocationType `json:"type"`
- S3Compliant *S3CompliantLocation `json:"s3Compliant"`
- }
-
- // S3Compliant
- type S3CompliantLocation struct {
- Bucket string `json:"bucket"`
- Endpoint string `json:"endpoint"`
- Prefix string `json:"prefix"`
- Region string `json:"region"`
+ Type LocationType `json:"type"`
+ Bucket string `json:"bucket"`
+ Endpoint string `json:"endpoint"`
+ Prefix string `json:"prefix"`
+ Region string `json:"region"`
}

// CredentialType
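To illustrate the flattened Location API introduced above, here is a minimal sketch (not part of this commit) of how calling code might populate the struct for both location types. The field values are placeholders, and the GCS example assumes only Type, Bucket, and Prefix are meaningful for that type.

package main

import (
	"fmt"

	crv1alpha1 "github.com/kanisterio/kanister/pkg/apis/cr/v1alpha1"
)

func main() {
	// S3-compliant location: bucket, endpoint, prefix, and region now sit directly on Location.
	s3Loc := crv1alpha1.Location{
		Type:     crv1alpha1.LocationTypeS3Compliant,
		Bucket:   "example-bucket", // placeholder
		Endpoint: "",               // empty endpoint means AWS S3, per the blueprint comments above
		Prefix:   "backups",        // placeholder
		Region:   "us-west-2",      // placeholder
	}
	// GCS location using the newly added LocationTypeGCS constant.
	gcsLoc := crv1alpha1.Location{
		Type:   crv1alpha1.LocationTypeGCS,
		Bucket: "example-gcs-bucket", // placeholder
		Prefix: "backups",            // placeholder
	}
	fmt.Println(s3Loc.Bucket, gcsLoc.Bucket)
}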
23 changes: 1 addition & 22 deletions pkg/apis/cr/v1alpha1/zz_generated.deepcopy.go

Some generated files are not rendered by default.

24 changes: 10 additions & 14 deletions pkg/function/backup_data_test.go
@@ -15,13 +15,11 @@ var _ = Suite(&BackupDataSuite{})
func newValidProfile() *param.Profile {
return &param.Profile{
Location: crv1alpha1.Location{
- Type: crv1alpha1.LocationTypeS3Compliant,
- S3Compliant: &crv1alpha1.S3CompliantLocation{
- Bucket: "test-bucket",
- Endpoint: "",
- Prefix: "",
- Region: "us-west-1",
- },
+ Type: crv1alpha1.LocationTypeS3Compliant,
+ Bucket: "test-bucket",
+ Endpoint: "",
+ Prefix: "",
+ Region: "us-west-1",
},
Credential: param.Credential{
Type: param.CredentialTypeKeyPair,
@@ -37,13 +35,11 @@ func newValidProfile() *param.Profile {
func newInvalidProfile() *param.Profile {
return &param.Profile{
Location: crv1alpha1.Location{
- Type: "foo-type",
- S3Compliant: &crv1alpha1.S3CompliantLocation{
- Bucket: "test-bucket",
- Endpoint: "",
- Prefix: "",
- Region: "us-west-1",
- },
+ Type: "foo-type",
+ Bucket: "test-bucket",
+ Endpoint: "",
+ Prefix: "",
+ Region: "us-west-1",
},
Credential: param.Credential{
Type: param.CredentialTypeKeyPair,
5 changes: 2 additions & 3 deletions pkg/function/create_volume_from_snapshot_test.go
@@ -28,9 +28,8 @@ func (s *CreateVolumeFromSnapshotTestSuite) TestCreateVolumeFromSnapshot(c *C) {
mockGetter := mockblockstorage.NewGetter()
profile := &param.Profile{
Location: crv1alpha1.Location{
- Type: crv1alpha1.LocationTypeS3Compliant,
- S3Compliant: &crv1alpha1.S3CompliantLocation{
- Region: "us-west-2"},
+ Type: crv1alpha1.LocationTypeS3Compliant,
+ Region: "us-west-2",
},
Credential: param.Credential{
Type: param.CredentialTypeKeyPair,
2 changes: 1 addition & 1 deletion pkg/function/create_volume_snapshot.go
@@ -69,7 +69,7 @@ func ValidateProfile(profile *param.Profile) error {
if profile.Location.Type != crv1alpha1.LocationTypeS3Compliant {
return errors.New("Location type not supported")
}
- if len(profile.Location.S3Compliant.Region) == 0 {
+ if len(profile.Location.Region) == 0 {
return errors.New("Region is not set")
}
if profile.Credential.Type != param.CredentialTypeKeyPair {
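As a usage sketch of how the flattened Region field now drives this validation (assumptions: the function package is imported by its pkg/function directory path, and a zero-value KeyPair credential is enough to reach the region check):

package main

import (
	"fmt"

	crv1alpha1 "github.com/kanisterio/kanister/pkg/apis/cr/v1alpha1"
	"github.com/kanisterio/kanister/pkg/function"
	"github.com/kanisterio/kanister/pkg/param"
)

func main() {
	profile := &param.Profile{
		Location: crv1alpha1.Location{
			Type:   crv1alpha1.LocationTypeS3Compliant,
			Region: "", // Region is read directly off Location now; empty should be rejected.
		},
		Credential: param.Credential{
			Type: param.CredentialTypeKeyPair,
		},
	}
	if err := function.ValidateProfile(profile); err != nil {
		fmt.Println("profile rejected:", err) // expected: "Region is not set"
	}
}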
5 changes: 2 additions & 3 deletions pkg/function/create_volume_snapshot_test.go
@@ -27,9 +27,8 @@ func (s *CreateVolumeSnapshotTestSuite) TestGetPVCInfo(c *C) {
tp := param.TemplateParams{
Profile: &param.Profile{
Location: crv1alpha1.Location{
- Type: crv1alpha1.LocationTypeS3Compliant,
- S3Compliant: &crv1alpha1.S3CompliantLocation{
- Region: "us-west-2"},
+ Type: crv1alpha1.LocationTypeS3Compliant,
+ Region: "us-west-2",
},
Credential: param.Credential{
Type: param.CredentialTypeKeyPair,
16 changes: 7 additions & 9 deletions pkg/function/data_test.go
@@ -4,19 +4,17 @@ import (
"context"
"fmt"

- . "gopkg.in/check.v1"
-
- "k8s.io/api/core/v1"
- metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
- "k8s.io/client-go/kubernetes"
-
kanister "github.com/kanisterio/kanister/pkg"
crv1alpha1 "github.com/kanisterio/kanister/pkg/apis/cr/v1alpha1"
"github.com/kanisterio/kanister/pkg/client/clientset/versioned"
"github.com/kanisterio/kanister/pkg/kube"
"github.com/kanisterio/kanister/pkg/param"
"github.com/kanisterio/kanister/pkg/resource"
"github.com/kanisterio/kanister/pkg/testutil"
+ . "gopkg.in/check.v1"
+ "k8s.io/api/core/v1"
+ metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
+ "k8s.io/client-go/kubernetes"
)

type DataSuite struct {
@@ -81,7 +79,7 @@ func newRestoreDataBlueprint(pvc string) *crv1alpha1.Blueprint {
Args: map[string]interface{}{
RestoreDataNamespaceArg: "{{ .StatefulSet.Namespace }}",
RestoreDataImageArg: "kanisterio/kanister-tools:0.15.0",
- RestoreDataBackupArtifactPrefixArg: "{{ .Profile.Location.S3Compliant.Bucket }}/{{ .Profile.Location.S3Compliant.Prefix }}",
+ RestoreDataBackupArtifactPrefixArg: "{{ .Profile.Location.Bucket }}/{{ .Profile.Location.Prefix }}",
RestoreDataRestorePathArg: "/mnt/data",
RestoreDataBackupIdentifierArg: "{{ .Time }}",
RestoreDataEncryptionKeyArg: "{{ .Secrets.backupKey.Data.password | toString }}",
@@ -110,7 +108,7 @@ func newBackupDataBlueprint() *crv1alpha1.Blueprint {
BackupDataPodArg: "{{ index .StatefulSet.Pods 0 }}",
BackupDataContainerArg: "{{ index .StatefulSet.Containers 0 0 }}",
BackupDataIncludePathArg: "/etc",
- BackupDataBackupArtifactPrefixArg: "{{ .Profile.Location.S3Compliant.Bucket }}/{{ .Profile.Location.S3Compliant.Prefix }}",
+ BackupDataBackupArtifactPrefixArg: "{{ .Profile.Location.Bucket }}/{{ .Profile.Location.Prefix }}",
BackupDataBackupIdentifierArg: "{{ .Time }}",
BackupDataEncryptionKeyArg: "{{ .Secrets.backupKey.Data.password | toString }}",
},
@@ -213,7 +211,7 @@ func newCopyDataTestBlueprint() crv1alpha1.Blueprint {
Args: map[string]interface{}{
CopyVolumeDataNamespaceArg: "{{ .PVC.Namespace }}",
CopyVolumeDataVolumeArg: "{{ .PVC.Name }}",
- CopyVolumeDataArtifactPrefixArg: "{{ .Profile.Location.S3Compliant.Bucket }}/{{ .Profile.Location.S3Compliant.Prefix }}/{{ .PVC.Namespace }}/{{ .PVC.Name }}",
+ CopyVolumeDataArtifactPrefixArg: "{{ .Profile.Location.Bucket }}/{{ .Profile.Location.Prefix }}/{{ .PVC.Namespace }}/{{ .PVC.Name }}",
},
},
},
4 changes: 2 additions & 2 deletions pkg/function/delete_data.go
@@ -36,8 +36,8 @@ func generateDeleteCommand(artifact string, profile *param.Profile) []string {
cmd = append(cmd, "export", fmt.Sprintf("AWS_ACCESS_KEY_ID=%s\n", profile.Credential.KeyPair.ID))
// Command to delete from the object store
cmd = append(cmd, "aws")
- if profile.Location.S3Compliant.Endpoint != "" {
- cmd = append(cmd, "--endpoint", profile.Location.S3Compliant.Endpoint)
+ if profile.Location.Endpoint != "" {
+ cmd = append(cmd, "--endpoint", profile.Location.Endpoint)
}
if profile.SkipSSLVerify {
cmd = append(cmd, "--no-verify-ssl")
