Skip to content

Commit

Permalink
Merge pull request #131 from kanisterio/sync
Browse files Browse the repository at this point in the history
Phase output params; Objectstore helpers;
  • Loading branch information
tdmanv authored Oct 16, 2018
2 parents 3d73fd9 + f3c0d99 commit 9c1308a
Show file tree
Hide file tree
Showing 48 changed files with 988 additions and 296 deletions.
2 changes: 2 additions & 0 deletions Dockerfile.in
Original file line number Diff line number Diff line change
@@ -1,4 +1,6 @@
FROM ARG_FROM
# MAINTAINER is deprecated in modern Docker; LABEL is the supported replacement.
LABEL maintainer="Tom Manville <tom@kasten.io>"
ADD ARG_SOURCE_BIN /ARG_BIN
# Install CA certificates for TLS verification; --no-cache plus the rm keeps
# the apk index out of the final image layer.
RUN apk -v --update add --no-cache ca-certificates && \
rm -f /var/cache/apk/*
ENTRYPOINT ["/ARG_BIN"]
1 change: 0 additions & 1 deletion docs/templates.rst
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,6 @@ The TemplateParam struct is defined as:
Deployment DeploymentParams
PVC PVCParams
ArtifactsIn map[string]crv1alpha1.Artifact // A Kanister Artifact
ArtifactsOut map[string]crv1alpha1.Artifact
Profile *Profile
ConfigMaps map[string]v1.ConfigMap
Secrets map[string]v1.Secret
Expand Down
4 changes: 2 additions & 2 deletions docs/tooling.rst
Original file line number Diff line number Diff line change
Expand Up @@ -354,9 +354,9 @@ The following snippet is an example of using kando from inside a Blueprint.
.. code-block:: console
kando location push --profile '{{ .Profile }}' --path '{{ .ArtifactsOut }}' -
kando location push --profile '{{ .Profile }}' --path '/backup/path' -
kando location delete --profile '{{ .Profile }}' --path '{{ .ArtifactsOut }}'
kando location delete --profile '{{ .Profile }}' --path '/backup/path'
Docker Image
Expand Down
4 changes: 2 additions & 2 deletions docs/tutorial.rst
Original file line number Diff line number Diff line change
Expand Up @@ -435,7 +435,7 @@ ConfigMap.
- |
AWS_ACCESS_KEY_ID={{ .Secrets.aws.Data.aws_access_key_id | toString }} \
AWS_SECRET_ACCESS_KEY={{ .Secrets.aws.Data.aws_secret_access_key | toString }} \
aws s3 cp /var/log/time.log {{ .ArtifactsOut.timeLog.KeyValue.path | quote }}
aws s3 cp /var/log/time.log {{ .ConfigMaps.location.Data.path }}/time-log/
EOF
If you re-execute this Kanister Action, you'll be able to see the Artifact in the
Expand Down Expand Up @@ -516,7 +516,7 @@ ConfigMap because the `inputArtifact` contains the fully specified path.
- |
AWS_ACCESS_KEY_ID={{ .Secrets.aws.Data.aws_access_key_id | toString }} \
AWS_SECRET_ACCESS_KEY={{ .Secrets.aws.Data.aws_secret_access_key | toString }} \
aws s3 cp /var/log/time.log {{ .ArtifactsOut.timeLog.KeyValue.path | quote }}
aws s3 cp /var/log/time.log {{ .ConfigMaps.location.Data.path }}/time-log/
restore:
type: Deployment
secretNames:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ actions:
- |
env_prefix="{{ snakecase .StatefulSet.Name | trimSuffix "_data" | upper}}"
cluster_ip=$(env | grep ${env_prefix}_CLIENT_PORT_9200_TCP_ADDR= | sed 's/.*=//')
snapshot_prefix="{{ .ArtifactsOut.esBackup.KeyValue.path }}"
snapshot_prefix="/elasticsearch-backups/{{ toDate "2006-01-02T15:04:05.999999999Z07:00" .Time | date "2006-01-02T15-04-05" }}"
list="$(curl -GET ${cluster_ip}:9200/_cat/indices | awk '{ print $3 }')"
for index in $list
do
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ actions:
then
dump_cmd+=(-u "${MONGO_ADMIN_USER}" -p "${MONGO_ADMIN_PASSWORD}")
fi
${dump_cmd[@]} | kando location push --profile '{{ toJson .Profile }}' --path '{{ .ArtifactsOut.cloudObject.KeyValue.path }}' -
${dump_cmd[@]} | kando location push --profile '{{ toJson .Profile }}' --path '/mongodb-replicaset-backups/{{ .StatefulSet.Name }}/{{ toDate "2006-01-02T15:04:05.999999999Z07:00" .Time | date "2006-01-02T15-04-05" }}/rs_backup.gz' -
restore:
type: StatefulSet
inputArtifactNames:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ actions:
- pipefail
- -c
- |
s3_path="{{ .ArtifactsOut.mysqlCloudDump.KeyValue.path }}"
s3_path="/mysql-backups/{{ .Deployment.Namespace }}/{{ .Deployment.Name }}/{{ toDate "2006-01-02T15:04:05.999999999Z07:00" .Time | date "2006-01-02T15-04-05" }}/dump.sql.gz"
mysqldump -u root --password="${MYSQL_ROOT_PASSWORD}" --single-transaction --all-databases | gzip - | kando location push --profile '{{ toJson .Profile }}' --path ${s3_path} -
restore:
type: Deployment
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ actions:
then
# Setup wal-e s3 connection parameters.
timeline={{ toDate "2006-01-02T15:04:05.999999999Z07:00" .Time | date "2006-01-02T15-04-05" }}
wale_s3_prefix="s3://{{ .Profile.Location.S3Compliant.Bucket }}/{{ .ArtifactsOut.manifest.KeyValue.prefix }}/${timeline}"
wale_s3_prefix="s3://{{ .Profile.Location.S3Compliant.Bucket }}/postgres-backups/{{ .Deployment.Name }}/${timeline}"
echo "${wale_s3_prefix}" > "${env_wal_prefix}"
fi
Expand Down Expand Up @@ -89,7 +89,7 @@ actions:
{{- if .Profile.Location.S3Compliant.Region }}
s3_cmd+=(--region "{{ .Profile.Location.S3Compliant.Region | quote}}")
{{- end }}
s3_path="s3://{{ .Profile.Location.S3Compliant.Bucket }}/{{ .ArtifactsOut.manifest.KeyValue.path }}"
s3_path="s3://{{ .Profile.Location.S3Compliant.Bucket }}/postgres-backups/{{ .Deployment.Name }}/{{ toDate "2006-01-02T15:04:05.999999999Z07:00" .Time | date "2006-01-02T15-04-05" }}/manifest.txt"
s3_cmd+=(s3 cp - "${s3_path}")
set +o xtrace
Expand Down
2 changes: 1 addition & 1 deletion examples/mongo-sidecar/blueprint.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@ actions:
then
cmd=(aws --endpoint https://storage.googleapis.com s3 cp)
fi
${cmd[@]} ${file} "{{ .ArtifactsOut.cloudObject.KeyValue.path }}"
${cmd[@]} ${file} "{{ .ConfigMaps.location.Data.bucket }}/backups/{{ .StatefulSet.Name }}/{{ toDate "2006-01-02T15:04:05.999999999Z07:00" .Time | date "2006-01-02T15-04-05" }}/rs0.tar"
restore:
type: StatefulSet
inputArtifactNames:
Expand Down
2 changes: 1 addition & 1 deletion examples/postgres-basic-pgdump/blueprint.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ actions:
export PGPORT=${{ .StatefulSet.Name | upper | replace "-" "_" }}_PORT_5432_TCP_PORT
export PGPASSWORD={{ .Secrets.postgres.Data.password_superuser | toString }}
pg_dumpall -U postgres -c -f backup.tar
aws s3 cp backup.tar "{{ .ArtifactsOut.cloudObject.KeyValue.path }}"
aws s3 cp backup.tar "{{ .ConfigMaps.location.Data.bucket }}/backups/{{ .StatefulSet.Namespace }}/{{ .StatefulSet.Name }}/{{ toDate "2006-01-02T15:04:05.999999999Z07:00" .Time | date "2006-01-02T15-04-05" }}/pg_backup.tar"
restore:
type: StatefulSet
secretNames:
Expand Down
4 changes: 2 additions & 2 deletions examples/time-log/blueprint.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -21,8 +21,8 @@ actions:
pod: "{{ index .Deployment.Pods 0 }}"
container: test-container
includePath: /var/log
backupArtifactPrefix: "{{ .ArtifactsOut.timeLog.KeyValue.path }}"
backupIdentifier: "{{ .ArtifactsOut.backupIdentifier.KeyValue.id }}"
backupArtifactPrefix: "{{ .Profile.Location.S3Compliant.Bucket }}/time-log"
backupIdentifier: "{{ toDate "2006-01-02T15:04:05.999999999Z07:00" .Time | date "2006-01-02" }}"
restore:
type: Deployment
inputArtifactNames:
Expand Down
8 changes: 8 additions & 0 deletions pkg/apis/cr/v1alpha1/deepcopy.go
Original file line number Diff line number Diff line change
Expand Up @@ -7,3 +7,11 @@ func (in *BlueprintPhase) DeepCopyInto(out *BlueprintPhase) {
// TODO: Handle 'Args'
return
}

// DeepCopyInto copies the receiver into out. in must be non-nil.
// Hand-written because deepcopy-gen cannot generate a copy for the
// map[string]interface{} Output field.
func (in *Phase) DeepCopyInto(out *Phase) {
	// Shallow field copy: Output still aliases the same underlying map.
	// TODO: Handle 'Output' map[string]interface{}
	*out = *in
}
5 changes: 3 additions & 2 deletions pkg/apis/cr/v1alpha1/types.go
Original file line number Diff line number Diff line change
Expand Up @@ -137,8 +137,9 @@ const (

// Phase is subcomponent of an action.
type Phase struct {
	// Name identifies this phase within its action.
	Name string `json:"name"`
	// State records the execution state of the phase.
	State State `json:"state"`
	// Output holds key/value results produced by the phase's function,
	// referenced from templates as .Phases.<name>.Output.<key>.
	// NOTE(review): no omitempty tag, so a nil map serializes as
	// "output":null — confirm that is intended.
	Output map[string]interface{} `json:"output"`
}

// k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
Expand Down
10 changes: 3 additions & 7 deletions pkg/apis/cr/v1alpha1/zz_generated.deepcopy.go

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

59 changes: 47 additions & 12 deletions pkg/controller/controller.go
Original file line number Diff line number Diff line change
Expand Up @@ -205,6 +205,8 @@ func (c *Controller) onUpdateActionSet(oldAS, newAS *crv1alpha1.ActionSet) error
if newAS.Status == nil || newAS.Status.State != crv1alpha1.StateRunning {
if newAS.Status == nil {
log.Infof("Updated ActionSet '%s' Status->nil", newAS.Name)
} else if newAS.Status.State == crv1alpha1.StateComplete {
c.logAndSuccessEvent(fmt.Sprintf("Updated ActionSet '%s' Status->%s", newAS.Name, newAS.Status.State), "Update Complete", newAS)
} else {
log.Infof("Updated ActionSet '%s' Status->%s", newAS.Name, newAS.Status.State)
}
Expand All @@ -218,8 +220,9 @@ func (c *Controller) onUpdateActionSet(oldAS, newAS *crv1alpha1.ActionSet) error
}
}
}
newAS.Status.State = crv1alpha1.StateComplete
c.logAndSuccessEvent(fmt.Sprintf("Updated ActionSet '%s' Status->%s", newAS.Name, newAS.Status.State), "Update Complete", newAS)
if len(newAS.Status.Actions) != 0 {
return nil
}
return reconcile.ActionSet(context.TODO(), c.crClient.CrV1alpha1(), newAS.GetNamespace(), newAS.GetName(), func(ras *crv1alpha1.ActionSet) error {
ras.Status.State = crv1alpha1.StateComplete
return nil
Expand Down Expand Up @@ -346,12 +349,6 @@ func (c *Controller) runAction(ctx context.Context, as *crv1alpha1.ActionSet, aI
if err != nil {
return err
}
artTpls := as.Status.Actions[aIDX].Artifacts
arts, err := param.RenderArtifacts(artTpls, *tp)
if err != nil {
return err
}
tp.ArtifactsOut = arts
phases, err := kanister.GetPhases(*bp, action.Name, *tp)
if err != nil {
return err
Expand All @@ -361,9 +358,13 @@ func (c *Controller) runAction(ctx context.Context, as *crv1alpha1.ActionSet, aI
for i, p := range phases {
c.logAndSuccessEvent(fmt.Sprintf("Executing phase %s", p.Name()), "Started Phase", as)
err = param.InitPhaseParams(ctx, c.clientset, tp, p.Name(), p.Objects())
if err == nil {
err = p.Exec(ctx, *tp)
if err != nil {
reason := fmt.Sprintf("ActionSetFailed Action: %s", as.Spec.Actions[aIDX].Name)
msg := fmt.Sprintf("Failed to init phase params: %#v:", as.Status.Actions[aIDX].Phases[i])
c.logAndErrorEvent(msg, reason, err, as, bp)
return
}
output, err := p.Exec(ctx, *bp, action.Name, *tp)
var rf func(*crv1alpha1.ActionSet) error
if err != nil {
rf = func(ras *crv1alpha1.ActionSet) error {
Expand All @@ -373,8 +374,8 @@ func (c *Controller) runAction(ctx context.Context, as *crv1alpha1.ActionSet, aI
}
} else {
rf = func(ras *crv1alpha1.ActionSet) error {
ras.Status.Actions[aIDX].Artifacts = arts
ras.Status.Actions[aIDX].Phases[i].State = crv1alpha1.StateComplete
ras.Status.Actions[aIDX].Phases[i].Output = output
return nil
}
}
Expand All @@ -390,9 +391,43 @@ func (c *Controller) runAction(ctx context.Context, as *crv1alpha1.ActionSet, aI
c.logAndErrorEvent(msg, reason, err, as, bp)
return
}
param.UpdatePhaseParams(ctx, tp, p.Name(), nil)
param.UpdatePhaseParams(ctx, tp, p.Name(), output)
c.logAndSuccessEvent(fmt.Sprintf("Completed phase %s", p.Name()), "Ended Phase", as)
}
// Check if output artifacts are present
artTpls := as.Status.Actions[aIDX].Artifacts
if len(artTpls) == 0 {
// No artifacts, set ActionSetStatus to complete
if rErr := reconcile.ActionSet(ctx, c.crClient.CrV1alpha1(), ns, name, func(ras *crv1alpha1.ActionSet) error {
ras.Status.State = crv1alpha1.StateComplete
return nil
}); rErr != nil {
reason := fmt.Sprintf("ActionSetFailed Action: %s", action.Name)
msg := fmt.Sprintf("Failed to update ActionSet: %s", name)
c.logAndErrorEvent(msg, reason, err, as, bp)
}
return
}
// Render the artifacts
arts, err := param.RenderArtifacts(artTpls, *tp)
if err != nil {
reason := fmt.Sprintf("ActionSetFailed Action: %s", action.Name)
msg := fmt.Sprintf("Failed to render Output Artifacts: %#v:", artTpls)
c.logAndErrorEvent(msg, reason, err, as, bp)
return
}
af := func(ras *crv1alpha1.ActionSet) error {
ras.Status.Actions[aIDX].Artifacts = arts
ras.Status.State = crv1alpha1.StateComplete
return nil
}
// Update ActionSet with artifacts
if aErr := reconcile.ActionSet(ctx, c.crClient.CrV1alpha1(), ns, name, af); aErr != nil {
reason := fmt.Sprintf("ActionSetFailed Action: %s", action.Name)
msg := fmt.Sprintf("Failed to update Output Artifacts: %#v:", artTpls)
c.logAndErrorEvent(msg, reason, aErr, as, bp)
return
}
}()
return nil
}
Expand Down
62 changes: 62 additions & 0 deletions pkg/controller/controller_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -146,6 +146,32 @@ func (s *ControllerSuite) waitOnActionSetState(c *C, as *crv1alpha1.ActionSet, s
return errors.Wrapf(err, "State '%s' never reached", state)
}

// newBPWithOutputArtifact builds a Blueprint whose single action "myAction"
// exposes a phase output value as the output artifact "myArt".
func newBPWithOutputArtifact() *crv1alpha1.Blueprint {
	art := crv1alpha1.Artifact{
		KeyValue: map[string]string{
			"key": "{{ .Phases.myPhase0.Output.key }}",
		},
	}
	action := &crv1alpha1.BlueprintAction{
		Kind:            "Deployment",
		OutputArtifacts: map[string]crv1alpha1.Artifact{"myArt": art},
		Phases: []crv1alpha1.BlueprintPhase{
			{Name: "myPhase0", Func: testutil.OutputFuncName},
		},
	}
	return &crv1alpha1.Blueprint{
		ObjectMeta: metav1.ObjectMeta{GenerateName: "test-blueprint-"},
		Actions:    map[string]*crv1alpha1.BlueprintAction{"myAction": action},
	}
}

func (s *ControllerSuite) TestEmptyActionSetStatus(c *C) {
as := &crv1alpha1.ActionSet{
ObjectMeta: metav1.ObjectMeta{
Expand Down Expand Up @@ -221,6 +247,9 @@ func (s *ControllerSuite) TestExecActionSet(c *C) {
{
funcNames: []string{testutil.ArgFuncName, testutil.FailFuncName},
},
{
funcNames: []string{testutil.OutputFuncName},
},
} {
var err error
// Add a blueprint with a mocked kanister function.
Expand Down Expand Up @@ -259,6 +288,8 @@ func (s *ControllerSuite) TestExecActionSet(c *C) {
testutil.ReleaseWaitFunc()
case testutil.ArgFuncName:
c.Assert(testutil.ArgFuncArgs(), DeepEquals, map[string]interface{}{"key": "myValue"})
case testutil.OutputFuncName:
c.Assert(testutil.OutputFuncOut(), DeepEquals, map[string]interface{}{"key": "myValue"})
}
}

Expand Down Expand Up @@ -333,3 +364,34 @@ func (s *ControllerSuite) TestRuntimeObjEventLogs(c *C) {
c.Assert(err, NotNil)
c.Assert(len(events.Items), Equals, 0)
}

// TestPhaseOutputAsArtifact verifies that a value emitted in a phase's Output
// map is rendered into the ActionSet's output artifacts once the action
// reaches StateComplete.
func (s *ControllerSuite) TestPhaseOutputAsArtifact(c *C) {
	// Create a blueprint that uses func output as artifact
	bp := newBPWithOutputArtifact()
	bp = testutil.BlueprintWithConfigMap(bp)
	bp, err := s.crCli.Blueprints(s.namespace).Create(bp)
	c.Assert(err, IsNil)

	// Add an actionset that references that blueprint.
	as := testutil.NewTestActionSet(s.namespace, bp.GetName(), "Deployment", s.deployment.GetName(), s.namespace)
	as = testutil.ActionSetWithConfigMap(as, s.confimap.GetName())
	as, err = s.crCli.ActionSets(s.namespace).Create(as)
	c.Assert(err, IsNil)

	err = s.waitOnActionSetState(c, as, crv1alpha1.StateRunning)
	c.Assert(err, IsNil)

	// Check if the func returned expected output
	c.Assert(testutil.OutputFuncOut(), DeepEquals, map[string]interface{}{"key": "myValue"})

	err = s.waitOnActionSetState(c, as, crv1alpha1.StateComplete)
	c.Assert(err, IsNil)

	// Check if the artifacts got updated correctly
	as, err = s.crCli.ActionSets(as.GetNamespace()).Get(as.GetName(), metav1.GetOptions{})
	// Bug fix: this error was previously unchecked; a failed Get would
	// nil-panic on as.Status below instead of failing the test cleanly.
	c.Assert(err, IsNil)
	arts := as.Status.Actions[0].Artifacts
	c.Assert(arts, NotNil)
	c.Assert(arts, HasLen, 1)
	keyVal := arts["myArt"].KeyValue
	c.Assert(keyVal, DeepEquals, map[string]string{"key": "myValue"})
}
19 changes: 19 additions & 0 deletions pkg/format/format.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
package format

import (
"regexp"
"strings"

log "github.com/sirupsen/logrus"
)

// lineSplitter matches a single CR or LF so pod output is split into lines
// regardless of line-ending convention. Compiled once at package scope
// instead of on every Log call (regexp compilation is relatively expensive).
var lineSplitter = regexp.MustCompile("[\r\n]")

// Log emits one log entry per non-blank line of output, tagging each entry
// with the originating pod and container names. Empty and whitespace-only
// lines are dropped; empty output logs nothing.
func Log(podName string, containerName string, output string) {
	if output == "" {
		return
	}
	for _, line := range lineSplitter.Split(output, -1) {
		if strings.TrimSpace(line) == "" {
			continue
		}
		log.Info("Pod: ", podName, " Container: ", containerName, " Out: ", line)
	}
}
Loading

0 comments on commit 9c1308a

Please sign in to comment.