Skip to content
This repository has been archived by the owner on Dec 9, 2022. It is now read-only.

Commit

Permalink
Merge pull request #36 from deliveroo/revert-35-LOG-4178-separate-ste…
Browse files Browse the repository at this point in the history
…ps-into-subfolders

Revert "LOG-4178 Separate steps into subfolders"
  • Loading branch information
farthir authored Jul 1, 2019
2 parents 6ac3ddf + 8c87bfd commit 4edc8ea
Show file tree
Hide file tree
Showing 7 changed files with 24 additions and 51 deletions.
22 changes: 4 additions & 18 deletions cli/data/get.go
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,6 @@ var (
getCommitPath string
getBucket string
getFiles []string
getSubdir string
)

const (
Expand All @@ -46,14 +45,14 @@ const (
)

var getCmd = &cobra.Command{
Use: "get [step/version] [destination path]",
Use: "get [version] [destination path]",
Short: "Fetch data from S3",
Args: cobra.ExactArgs(2),
Long: `Fetch data from a S3 versioned path.
Example:
$ paddle data get -b experimental --bucket roo-pipeline --subdir version1 trained-model/version1 dest/path
$ paddle data get -b experimental --bucket roo-pipeline trained-model/version1 dest/path
$ paddle data get -b experimental --bucket roo-pipeline --files file1.csv,file2.csv trained-model/version1 dest/path
`,
Run: func(cmd *cobra.Command, args []string) {
Expand All @@ -69,7 +68,7 @@ $ paddle data get -b experimental --bucket roo-pipeline --files file1.csv,file2.
path: fmt.Sprintf("%s/%s/%s", args[0], getBranch, getCommitPath),
}

copyPathToDestination(source, args[1], getFiles, getSubdir)
copyPathToDestination(source, args[1], getFiles)
},
}

Expand All @@ -78,10 +77,9 @@ func init() {
getCmd.Flags().StringVar(&getBucket, "bucket", "", "Bucket to use")
getCmd.Flags().StringVarP(&getCommitPath, "path", "p", "HEAD", "Path to fetch (instead of HEAD)")
getCmd.Flags().StringSliceVarP(&getFiles, "files", "f", []string{}, "A list of files to download separated by comma")
getCmd.Flags().StringVarP(&getSubdir, "subdir", "d", "", "Custom subfolder name for export path")
}

func copyPathToDestination(source S3Path, destination string, files []string, subdir string) {
func copyPathToDestination(source S3Path, destination string, files []string) {
session := session.Must(session.NewSessionWithOptions(session.Options{
SharedConfigState: session.SharedConfigEnable,
}))
Expand All @@ -96,9 +94,6 @@ func copyPathToDestination(source S3Path, destination string, files []string, su
if !strings.HasSuffix(source.path, "/") {
source.path += "/"
}
if subdir != "" {
destination = parseDestination(destination, subdir)
}

fmt.Println("Copying " + source.path + " to " + destination)
copy(session, source, destination, files)
Expand All @@ -118,15 +113,6 @@ func readHEAD(session *session.Session, source S3Path) string {
return buf.String()
}

// parseDestination joins subdir onto destination, inserting a "/"
// separator only when destination does not already end with one.
func parseDestination(destination string, subdir string) string {
	sep := "/"
	if strings.HasSuffix(destination, sep) {
		sep = ""
	}
	return destination + sep + subdir
}

func copy(session *session.Session, source S3Path, destination string, files []string) {
query := &s3.ListObjectsV2Input{
Bucket: aws.String(source.bucket),
Expand Down
1 change: 0 additions & 1 deletion cli/pipeline/pipeline_definition.go
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,6 @@ type PipelineDefinitionStep struct {
Branch string `yaml:"branch" json:"branch"`
Path string `yaml:"path" json:"path"`
Bucket string `yaml:"bucket" json:"bucket"`
Subdir string `yaml:"subdir" json:"subdir"`
} `yaml:"inputs" json:"inputs"`
Commands []string `yaml:"commands" json:"commands"`
Resources struct {
Expand Down
15 changes: 5 additions & 10 deletions cli/pipeline/pipeline_definition_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -10,28 +10,23 @@ func TestParsePipeline(t *testing.T) {
if err != nil {
panic(err.Error())
}
pipeline := ParsePipeline(data)
pipeline := parsePipeline(data)

if len(pipeline.Steps) != 2 {
t.Errorf("expected two steps, got %d", len(pipeline.Steps))
t.Errorf("excepted two steps, got %d", len(pipeline.Steps))
}

if pipeline.Bucket != "canoe-sample-pipeline" {
t.Errorf("Expected bucket to be canoe-sample-pipeline, got %s", pipeline.Bucket)
}

subdir := pipeline.Steps[1].Inputs[0].Subdir
if subdir != "step1-version1" {
t.Errorf("expected second step input subdir to be 'step1-version1' but got %s", subdir)
}
}

func TestOverrideTag(t *testing.T) {
data, err := ioutil.ReadFile("test/sample_steps_passing.yml")
if err != nil {
panic(err.Error())
}
pipeline := ParsePipeline(data)
pipeline := parsePipeline(data)

pipeline.Steps[0].OverrideTag("")

Expand All @@ -51,7 +46,7 @@ func TestOverrideVersion(t *testing.T) {
if err != nil {
panic(err.Error())
}
pipeline := ParsePipeline(data)
pipeline := parsePipeline(data)

pipeline.Steps[0].OverrideVersion("", true)

Expand All @@ -75,7 +70,7 @@ func TestOverrideBranch(t *testing.T) {
if err != nil {
panic(err.Error())
}
pipeline := ParsePipeline(data)
pipeline := parsePipeline(data)

pipeline.Steps[0].OverrideBranch("", true)

Expand Down
17 changes: 8 additions & 9 deletions cli/pipeline/run_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -2,16 +2,15 @@ package pipeline

import (
"fmt"
"testing"
"time"

v1 "k8s.io/api/core/v1"
"k8s.io/api/core/v1"
k8errors "k8s.io/apimachinery/pkg/api/errors"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/watch"
"k8s.io/client-go/kubernetes/fake"
ktesting "k8s.io/client-go/testing"
"testing"
"time"
)

func parseTimeOrDie(ts string) metav1.Time {
Expand Down Expand Up @@ -113,11 +112,11 @@ func TestRunPipelineSuccess(t *testing.T) {
expectPods := [2]string{"sample-steps-passing-version1-step1-master", "sample-steps-passing-version1a-step2-master"}

for _, p := range expectPods {
if deleted[p] != 3 {
t.Errorf("expected delete of "+p+" to be called three times, got %i", deleted[p])
if deleted[p] != 2 {
t.Errorf("excepted delete of "+p+" to be called twice, got %i", deleted[p])
}
if created[p] != 1 {
t.Errorf("expected create of "+p+" to be called once, got %i", created[p])
t.Errorf("excepted create of "+p+" to be called once, got %i", created[p])
}
}
}
Expand Down Expand Up @@ -163,7 +162,7 @@ func TestRunPipelineFailure(t *testing.T) {
runPipeline("test/sample_steps_passing.yml", testRunFlags)

if len(errors) != 2 {
t.Errorf("expected two errors, actual %v", len(errors))
t.Errorf("excepted two errors, actual %v", len(errors))
}
}

Expand Down Expand Up @@ -206,7 +205,7 @@ func TestRunPipelineStartTimeout(t *testing.T) {
runPipeline("test/sample_steps_passing.yml", &flags)

if len(errors) != 2 {
t.Errorf("expected two errors, actual %v", len(errors))
t.Errorf("excepted two errors, actual %v", len(errors))
}
msg := "[paddle] [Timed out waiting for pod to start. Cluster might not have sufficient resources.]"
for _, err := range errors {
Expand Down
2 changes: 1 addition & 1 deletion cli/pipeline/template.go
Original file line number Diff line number Diff line change
Expand Up @@ -132,7 +132,7 @@ spec:
- "-c"
- "mkdir -p $INPUT_PATH $OUTPUT_PATH &&
{{ range $index, $input := .Step.Inputs }}
paddle data get {{ $input.Step }}/{{ $input.Version }} $INPUT_PATH -b {{ $input.Branch | sanitizeName }} -p {{ $input.Path }} -d {{ $input.Subdir }} {{ $input.Bucket | bucketParam }} &&
paddle data get {{ $input.Step }}/{{ $input.Version }} $INPUT_PATH -b {{ $input.Branch | sanitizeName }} -p {{ $input.Path }} {{ $input.Bucket | bucketParam }} &&
{{ end }}
touch /data/first-step.txt &&
echo first step finished &&
Expand Down
17 changes: 6 additions & 11 deletions cli/pipeline/template_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -2,10 +2,9 @@ package pipeline

import (
"io/ioutil"
"strings"
"testing"

v1 "k8s.io/api/core/v1"
"k8s.io/api/core/v1"
"k8s.io/apimachinery/pkg/util/yaml"
)

Expand All @@ -14,34 +13,30 @@ func TestCompileTemplate(t *testing.T) {
if err != nil {
panic(err.Error())
}
pipeline := ParsePipeline(data)
pipeline := parsePipeline(data)

podDefinition := NewPodDefinition(pipeline, &pipeline.Steps[1])
podDefinition := NewPodDefinition(pipeline, &pipeline.Steps[0])

stepPodBuffer := podDefinition.compile()

pod := &v1.Pod{}
yaml.NewYAMLOrJSONDecoder(stepPodBuffer, 4096).Decode(pod)

if pod.Name != "sample-steps-passing-version1a-step2-master" {
if pod.Name != "sample-steps-passing-version1-step1-master" {
t.Errorf("Pod name is %s", pod.Name)
}

if pod.Spec.Containers[0].Image != pipeline.Steps[0].Image {
t.Errorf("First image is %s", pod.Spec.Containers[0].Image)
}

if !strings.Contains(pod.Spec.Containers[1].Command[2], "-d step1-version1") {
t.Errorf("Expected paddle get command to contain -d step1-version1, but it did not")
}
}

func TestSecrets(t *testing.T) {
data, err := ioutil.ReadFile("test/sample_steps_passing.yml")
if err != nil {
panic(err.Error())
}
pipeline := ParsePipeline(data)
pipeline := parsePipeline(data)

podDefinition := NewPodDefinition(pipeline, &pipeline.Steps[0])
secrets := []string{"ENV_VAR:secret_store:key_name"}
Expand Down Expand Up @@ -73,7 +68,7 @@ func TestEnv(t *testing.T) {
if err != nil {
panic(err.Error())
}
pipeline := ParsePipeline(data)
pipeline := parsePipeline(data)

podDefinition := NewPodDefinition(pipeline, &pipeline.Steps[0])
env := []string{"ENV_VAR:env_value"}
Expand Down
1 change: 0 additions & 1 deletion cli/pipeline/test/sample_steps_passing.yml
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,6 @@ steps:
version: version1
branch: master
path: HEAD
subdir: 'step1-version1'
image: 219541440308.dkr.ecr.eu-west-1.amazonaws.com/paddlecontainer:latest
branch: master
commands:
Expand Down

0 comments on commit 4edc8ea

Please sign in to comment.