From 7aa10f45bc8106025ba6a0614cf0a1ece8bd51b7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Onur=20S=C3=B6nmez?= Date: Mon, 30 Mar 2020 16:58:06 +0300 Subject: [PATCH] Change default copy behaviour to preserve directory structure (#133) * Add --flatten flag instead of --parents * Preserve directory structure as a default * Update Changelog and Readme.md * Simplify prepareUploadDestination * Consolidate copy and upload destination url prep --- CHANGELOG.md | 3 +- README.md | 12 +- command/cp.go | 85 ++----- command/mv.go | 2 +- e2e/cp_test.go | 669 ++++++++++++++++++++++++++++++++++++++----------- e2e/mv_test.go | 9 +- 6 files changed, 565 insertions(+), 215 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3a885c1f7..2c543c6a2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -30,7 +30,8 @@ This is a major release with many breaking changes. - `-ds`, `-dw`, `-us` and `-uw` global flags are no longer available. Multipart concurrency and part size flags are now part of the `cp/mv` command. New replacement flags are `--concurrency | -c` and `--part-size | -p`. ([#110](https://github.com/peak/s5cmd/pull/110)) - +- Dropped `-parents` flag from copy command. Copy behaviour has changed to preserve the directory hierarchy as a default. + Optional `-flatten` flag is added to flatten directory structure. #### Features - Added `mb` command to make buckets. ([#25](https://github.com/peak/s5cmd/issues/25)) diff --git a/README.md b/README.md index bc484a152..4868b6a86 100644 --- a/README.md +++ b/README.md @@ -84,10 +84,10 @@ parallel. `s5cmd` will create the destination directory if it is missing. `file1.gz file2.gz file3.gz` -ℹ️ `s5cmd` flattens the source directory structure by default. If you want to keep -the source directory structure, use the `--parents` flag. +ℹ️ `s5cmd` preserves the source directory structure by default. If you want to flatten +the source directory structure, use the `--flatten` flag. - s5cmd cp --parents 's3://bucket/logs/2020/03/*' logs/ + s5cmd cp 's3://bucket/logs/2020/03/*' logs/ The above command will match the following objects: @@ -111,7 +111,7 @@ logs/19/originals/file3.gz #### Upload multiple files to S3 - s5cmd cp --parents directory/ s3://bucket/ + s5cmd cp directory/ s3://bucket/ Will upload all files at given directory to S3 while keeping the folder hiearchy of the source. @@ -138,7 +138,7 @@ they'll be deleted in a single request. `s5cmd` supports copying objects on the server side as well. - s5cmd cp --parents 's3://bucket/logs/2020/*' s3://bucket/logs/backup/ + s5cmd cp 's3://bucket/logs/2020/*' s3://bucket/logs/backup/ Will copy all the matching objects to the given S3 prefix, respecting the source folder hiearchy. 
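As a quick reference for the rule the new default follows, here is a minimal sketch. The helper `destName` and its inputs are illustrative and not part of the diff, but the branch mirrors `prepareRemoteDestination` and `prepareLocalDestination` below: batch copies (wildcard or directory sources) keep the path relative to the matched prefix unless `--flatten` is given.

```go
// Sketch only: how the destination object name is chosen after this change.
// isBatch is true for wildcard or directory sources; flatten reflects --flatten.
func destName(base, relative string, isBatch, flatten bool) string {
	objname := base // e.g. "file3.gz"
	if isBatch && !flatten {
		objname = relative // e.g. "19/originals/file3.gz", hierarchy preserved
	}
	return objname
}
```

With `s3://bucket/logs/2020/03/*` as the source, the default therefore writes `logs/19/originals/file3.gz` under the destination, while `--flatten` would write `logs/file3.gz`.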
@@ -169,7 +169,7 @@ or `commands.txt` content could look like: ``` -cp --parents s3://bucket/2020/03/* logs/2020/03/ +cp s3://bucket/2020/03/* logs/2020/03/ # line comments are supported rm s3://bucket/2020/03/19/file2.gz diff --git a/command/cp.go b/command/cp.go index 06ddc08ce..9806518f2 100644 --- a/command/cp.go +++ b/command/cp.go @@ -77,8 +77,9 @@ var copyCommandFlags = []cli.Flag{ Usage: "only overwrite destination if source modtime is newer", }, &cli.BoolFlag{ - Name: "parents", - Usage: "create same directory structure of source, starting from the first wildcard", + Name: "flatten", + Aliases: []string{"f"}, + Usage: "flatten directory structure of source, starting from the first wildcard", }, &cli.StringFlag{ Name: "storage-class", @@ -118,7 +119,7 @@ var CopyCommand = &cli.Command{ noClobber: c.Bool("no-clobber"), ifSizeDiffer: c.Bool("if-size-differ"), ifSourceNewer: c.Bool("if-source-newer"), - parents: c.Bool("parents"), + flatten: c.Bool("flatten"), storageClass: storage.LookupClass(c.String("storage-class")), concurrency: c.Int("concurrency"), partSize: c.Int64("part-size") * megabytes, @@ -140,7 +141,7 @@ type Copy struct { noClobber bool ifSizeDiffer bool ifSourceNewer bool - parents bool + flatten bool storageClass storage.StorageClass // s3 options @@ -183,6 +184,11 @@ func (c Copy) Run(ctx context.Context) error { }() isBatch := srcurl.HasGlob() + if !isBatch && !srcurl.IsRemote() { + obj, _ := client.Stat(ctx, srcurl) + isBatch = obj != nil && obj.Type.IsDir() + } + for object := range objch { if object.Type.IsDir() || errorpkg.IsCancelation(object.Err) { continue @@ -223,7 +229,7 @@ func (c Copy) prepareCopyTask( isBatch bool, ) func() error { return func() error { - dsturl = prepareCopyDestination(srcurl, dsturl, c.parents, isBatch) + dsturl = prepareRemoteDestination(srcurl, dsturl, c.flatten, isBatch) err := c.doCopy(ctx, srcurl, dsturl) if err != nil { return &errorpkg.Error{ @@ -244,7 +250,7 @@ func (c Copy) prepareDownloadTask( isBatch bool, ) func() error { return func() error { - dsturl, err := prepareDownloadDestination(ctx, srcurl, dsturl, c.parents, isBatch) + dsturl, err := prepareLocalDestination(ctx, srcurl, dsturl, c.flatten, isBatch) if err != nil { return err } @@ -269,7 +275,7 @@ func (c Copy) prepareUploadTask( isBatch bool, ) func() error { return func() error { - dsturl = prepareUploadDestination(srcurl, dsturl, c.parents, isBatch) + dsturl = prepareRemoteDestination(srcurl, dsturl, c.flatten, isBatch) err := c.doUpload(ctx, srcurl, dsturl) if err != nil { return &errorpkg.Error{ @@ -494,48 +500,36 @@ func (c Copy) shouldOverride(ctx context.Context, srcurl *url.URL, dsturl *url.U return stickyErr } -// prepareCopyDestination will return a new destination URL for local->local -// and remote->remote copy operations. -func prepareCopyDestination( +// prepareRemoteDestination will return a new destination URL for +// remote->remote and local->remote copy operations. +func prepareRemoteDestination( srcurl *url.URL, dsturl *url.URL, - parents bool, + flatten bool, isBatch bool, ) *url.URL { objname := srcurl.Base() - if parents { + if isBatch && !flatten { objname = srcurl.Relative() } - // For remote->remote copy operations, treat as prefix if it has "/" - // suffix. - if dsturl.IsRemote() { - if dsturl.IsPrefix() || dsturl.IsBucket() { - dsturl = dsturl.Join(objname) - } - return dsturl - } - - // Absolute path is given. Use given and local copy operation - // will create missing directories if has one. 
- if !isBatch { - return dsturl + if dsturl.IsPrefix() || dsturl.IsBucket() { + dsturl = dsturl.Join(objname) } - - return dsturl.Join(objname) + return dsturl } // prepareDownloadDestination will return a new destination URL for -// remote->local and remote->remote copy operations. -func prepareDownloadDestination( +// remote->local copy operations. +func prepareLocalDestination( ctx context.Context, srcurl *url.URL, dsturl *url.URL, - parents bool, + flatten bool, isBatch bool, ) (*url.URL, error) { objname := srcurl.Base() - if parents { + if isBatch && !flatten { objname = srcurl.Relative() } @@ -555,7 +549,7 @@ func prepareDownloadDestination( return nil, err } - if parents { + if isBatch && !flatten { dsturl = dsturl.Join(objname) if err := os.MkdirAll(dsturl.Dir(), os.ModePerm); err != nil { return nil, err @@ -578,27 +572,6 @@ func prepareDownloadDestination( return dsturl, nil } -// prepareUploadDestination will return a new destination URL for local->remote -// operations. -func prepareUploadDestination( - srcurl *url.URL, - dsturl *url.URL, - parents bool, - isBatch bool, -) *url.URL { - // if given destination is a bucket/objname, don't do any join and respect - // the user's destination object name. - if !isBatch && !dsturl.IsBucket() && !dsturl.IsPrefix() { - return dsturl - } - - objname := srcurl.Base() - if parents { - objname = srcurl.Relative() - } - return dsturl.Join(objname) -} - // getObject checks if the object from given url exists. If no object is // found, error and returning object would be nil. func getObject(ctx context.Context, url *url.URL) (*storage.Object, error) { @@ -644,12 +617,6 @@ func Validate(c *cli.Context) error { return fmt.Errorf("target %q can not contain glob characters", dst) } - // --parents is used in conjunction with a wildcard source to deduce - // relative source paths. - if !srcurl.HasGlob() && c.Bool("parents") { - return fmt.Errorf("source argument must contain wildcard if --parents flag is provided") - } - // we don't operate on S3 prefixes for copy and delete operations. if srcurl.IsBucket() || srcurl.IsPrefix() { return fmt.Errorf("source argument must contain wildcard character") diff --git a/command/mv.go b/command/mv.go index ce00140ee..6bc2c6df7 100644 --- a/command/mv.go +++ b/command/mv.go @@ -51,7 +51,7 @@ var MoveCommand = &cli.Command{ noClobber: c.Bool("no-clobber"), ifSizeDiffer: c.Bool("if-size-differ"), ifSourceNewer: c.Bool("if-source-newer"), - parents: c.Bool("parents"), + flatten: c.Bool("flatten"), storageClass: storage.LookupClass(c.String("storage-class")), } diff --git a/e2e/cp_test.go b/e2e/cp_test.go index b9f2dc6f0..c6c33a797 100644 --- a/e2e/cp_test.go +++ b/e2e/cp_test.go @@ -235,7 +235,7 @@ func TestCopyS3PrefixToLocalMustReturnError(t *testing.T) { assert.Assert(t, ensureS3Object(s3client, bucket, objectpath, content)) } -// cp s3://bucket/* dir/ (flat source hiearchy) +// cp --flatten s3://bucket/* dir/ (flat source hiearchy) func TestCopyMultipleFlatS3ObjectsToLocal(t *testing.T) { t.Parallel() @@ -247,34 +247,36 @@ func TestCopyMultipleFlatS3ObjectsToLocal(t *testing.T) { createBucket(t, s3client, bucket) filesToContent := map[string]string{ - "testfile1.txt": "this is a test file 1", - "readme.md": "this is a readme file", - "filename-with-hypen.gz": "file has hypen in its name", - "another_test_file.txt": "yet another txt file. 
yatf.", + "testfile1.txt": "this is a test file 1", + "a/readme.md": "this is a readme file", + "a/filename-with-hypen.gz": "file has hypen in its name", + "b/another_test_file.txt": "yet another txt file. yatf.", } for filename, content := range filesToContent { putFile(t, s3client, bucket, filename, content) } - cmd := s5cmd("cp", "s3://"+bucket+"/*", ".") + cmd := s5cmd("cp", "--flatten", "s3://"+bucket+"/*", ".") result := icmd.RunCmd(cmd) result.Assert(t, icmd.Success) assertLines(t, result.Stdout(), map[int]compareFunc{ 0: equals(""), - 1: equals(`cp s3://%v/another_test_file.txt another_test_file.txt`, bucket), - 2: equals(`cp s3://%v/filename-with-hypen.gz filename-with-hypen.gz`, bucket), - 3: equals(`cp s3://%v/readme.md readme.md`, bucket), + 1: equals(`cp s3://%v/a/filename-with-hypen.gz filename-with-hypen.gz`, bucket), + 2: equals(`cp s3://%v/a/readme.md readme.md`, bucket), + 3: equals(`cp s3://%v/b/another_test_file.txt another_test_file.txt`, bucket), 4: equals(`cp s3://%v/testfile1.txt testfile1.txt`, bucket), }, sortInput(true)) // assert local filesystem - var expectedFiles []fs.PathOp - for filename, content := range filesToContent { - pathop := fs.WithFile(filename, content, fs.WithMode(0644)) - expectedFiles = append(expectedFiles, pathop) + // expect flattened directory structure + var expectedFiles = []fs.PathOp{ + fs.WithFile("testfile1.txt", "this is a test file 1"), + fs.WithFile("readme.md", "this is a readme file"), + fs.WithFile("filename-with-hypen.gz", "file has hypen in its name"), + fs.WithFile("another_test_file.txt", "yet another txt file. yatf."), } expected := fs.Expected(t, expectedFiles...) assert.Assert(t, fs.Equal(cmd.Dir, expected)) @@ -285,7 +287,7 @@ func TestCopyMultipleFlatS3ObjectsToLocal(t *testing.T) { } } -// cp s3://bucket/*.txt dir/ +// cp --flatten s3://bucket/*.txt dir/ func TestCopyMultipleFlatS3ObjectsToLocalWithPartialMatching(t *testing.T) { t.Parallel() @@ -297,24 +299,24 @@ func TestCopyMultipleFlatS3ObjectsToLocalWithPartialMatching(t *testing.T) { createBucket(t, s3client, bucket) filesToContent := map[string]string{ - "testfile1.txt": "this is a test file 1", - "readme.md": "this is a readme file", - "filename-with-hypen.gz": "file has hypen in its name", - "another_test_file.txt": "yet another txt file", + "testfile1.txt": "this is a test file 1", + "readme.md": "this is a readme file", + "filename-with-hypen.gz": "file has hypen in its name", + "dir/another_test_file.txt": "yet another txt file", } for filename, content := range filesToContent { putFile(t, s3client, bucket, filename, content) } - cmd := s5cmd("cp", "s3://"+bucket+"/*.txt", ".") + cmd := s5cmd("cp", "--flatten", "s3://"+bucket+"/*.txt", ".") result := icmd.RunCmd(cmd) result.Assert(t, icmd.Success) assertLines(t, result.Stdout(), map[int]compareFunc{ 0: equals(""), - 1: equals(`cp s3://%v/another_test_file.txt another_test_file.txt`, bucket), + 1: equals(`cp s3://%v/dir/another_test_file.txt another_test_file.txt`, bucket), 2: equals(`cp s3://%v/testfile1.txt testfile1.txt`, bucket), }, sortInput(true)) @@ -333,7 +335,7 @@ func TestCopyMultipleFlatS3ObjectsToLocalWithPartialMatching(t *testing.T) { } // cp s3://bucket/*/*.txt dir/ -func TestCopyMultipleNestedS3ObjectsToLocalWithPartialMatching(t *testing.T) { +func TestCopyMultipleFlatNestedS3ObjectsToLocalWithPartialMatching(t *testing.T) { t.Parallel() bucket := s3BucketFromTestName(t) @@ -353,7 +355,7 @@ func TestCopyMultipleNestedS3ObjectsToLocalWithPartialMatching(t *testing.T) { putFile(t, s3client, 
bucket, filename, content) } - cmd := s5cmd("cp", "s3://"+bucket+"/*/*.txt", ".") + cmd := s5cmd("cp", "--flatten", "s3://"+bucket+"/*/*.txt", ".") result := icmd.RunCmd(cmd) result.Assert(t, icmd.Success) @@ -373,7 +375,7 @@ func TestCopyMultipleNestedS3ObjectsToLocalWithPartialMatching(t *testing.T) { } } -// --json cp s3://bucket/* . +// --json cp --flatten s3://bucket/* . func TestCopyMultipleFlatS3ObjectsToLocalJSON(t *testing.T) { t.Parallel() @@ -385,17 +387,17 @@ func TestCopyMultipleFlatS3ObjectsToLocalJSON(t *testing.T) { createBucket(t, s3client, bucket) filesToContent := map[string]string{ - "testfile1.txt": "this is a test file 1", - "readme.md": "this is a readme file", - "filename-with-hypen.gz": "file has hypen in its name", - "another_test_file.txt": "yet another txt file. yatf.", + "testfile1.txt": "this is a test file 1", + "readme.md": "this is a readme file", + "b/filename-with-hypen.gz": "file has hypen in its name", + "a/another_test_file.txt": "yet another txt file. yatf.", } for filename, content := range filesToContent { putFile(t, s3client, bucket, filename, content) } - cmd := s5cmd("-json", "cp", "s3://"+bucket+"/*", ".") + cmd := s5cmd("-json", "cp", "--flatten", "s3://"+bucket+"/*", ".") result := icmd.RunCmd(cmd) result.Assert(t, icmd.Success) @@ -406,7 +408,7 @@ func TestCopyMultipleFlatS3ObjectsToLocalJSON(t *testing.T) { { "operation": "cp", "success": true, - "source": "s3://%v/another_test_file.txt", + "source": "s3://%v/a/another_test_file.txt", "destination": "another_test_file.txt", "object":{ "type": "file", @@ -418,7 +420,7 @@ func TestCopyMultipleFlatS3ObjectsToLocalJSON(t *testing.T) { { "operation": "cp", "success": true, - "source": "s3://%v/filename-with-hypen.gz", + "source": "s3://%v/b/filename-with-hypen.gz", "destination": "filename-with-hypen.gz", "object": { "type": "file", @@ -453,10 +455,12 @@ func TestCopyMultipleFlatS3ObjectsToLocalJSON(t *testing.T) { }, sortInput(true), jsonCheck(true)) // assert local filesystem - var expectedFiles []fs.PathOp - for filename, content := range filesToContent { - pathop := fs.WithFile(filename, content, fs.WithMode(0644)) - expectedFiles = append(expectedFiles, pathop) + // expect flattened directory structure + var expectedFiles = []fs.PathOp{ + fs.WithFile("testfile1.txt", "this is a test file 1"), + fs.WithFile("readme.md", "this is a readme file"), + fs.WithFile("filename-with-hypen.gz", "file has hypen in its name"), + fs.WithFile("another_test_file.txt", "yet another txt file. yatf."), } expected := fs.Expected(t, expectedFiles...) assert.Assert(t, fs.Equal(cmd.Dir, expected)) @@ -479,12 +483,9 @@ func TestCopyMultipleNestedS3ObjectsToLocal(t *testing.T) { createBucket(t, s3client, bucket) filesToContent := map[string]string{ - "testfile1.txt": "this is a test file 1", - "a/readme.md": "this is a readme file", - "a/b/filename-with-hypen.gz": "file has hypen in its name", - // these 2 files are the same. we expect that only 1 file with this - // filename reside in the working directory because 's5cmd cp - // s3://.../* .' will flatten the folder hiearchy. + "testfile1.txt": "this is a test file 1", + "a/readme.md": "this is a readme file", + "a/b/filename-with-hypen.gz": "file has hypen in its name", "b/another_test_file.txt": "yet another txt file. yatf.", "c/d/e/another_test_file.txt": "yet another txt file. 
yatf.", } @@ -500,20 +501,38 @@ func TestCopyMultipleNestedS3ObjectsToLocal(t *testing.T) { assertLines(t, result.Stdout(), map[int]compareFunc{ 0: equals(""), - 1: equals(`cp s3://%v/a/b/filename-with-hypen.gz filename-with-hypen.gz`, bucket), - 2: equals(`cp s3://%v/a/readme.md readme.md`, bucket), - 3: equals(`cp s3://%v/b/another_test_file.txt another_test_file.txt`, bucket), - 4: equals(`cp s3://%v/c/d/e/another_test_file.txt another_test_file.txt`, bucket), + 1: equals(`cp s3://%v/a/b/filename-with-hypen.gz a/b/filename-with-hypen.gz`, bucket), + 2: equals(`cp s3://%v/a/readme.md a/readme.md`, bucket), + 3: equals(`cp s3://%v/b/another_test_file.txt b/another_test_file.txt`, bucket), + 4: equals(`cp s3://%v/c/d/e/another_test_file.txt c/d/e/another_test_file.txt`, bucket), 5: equals(`cp s3://%v/testfile1.txt testfile1.txt`, bucket), }, sortInput(true)) // assert local filesystem - var expectedFiles []fs.PathOp - for filename, content := range filesToContent { - // trim nested folder structure because s5cmd will flatten the folder - // hiearchy - pathop := fs.WithFile(filepath.Base(filename), content, fs.WithMode(0644)) - expectedFiles = append(expectedFiles, pathop) + var expectedFiles = []fs.PathOp{ + fs.WithFile("testfile1.txt", "this is a test file 1"), + fs.WithDir( + "a", + fs.WithFile("readme.md", "this is a readme file"), + fs.WithDir( + "b", + fs.WithFile("filename-with-hypen.gz", "file has hypen in its name"), + ), + ), + fs.WithDir( + "b", + fs.WithFile("another_test_file.txt", "yet another txt file. yatf."), + ), + fs.WithDir( + "c", + fs.WithDir( + "d", + fs.WithDir( + "e", + fs.WithFile("another_test_file.txt", "yet another txt file. yatf."), + ), + ), + ), } expected := fs.Expected(t, expectedFiles...) assert.Assert(t, fs.Equal(cmd.Dir, expected)) @@ -524,8 +543,8 @@ func TestCopyMultipleNestedS3ObjectsToLocal(t *testing.T) { } } -// cp --parents s3://bucket/*/*.ext dir/ -func TestCopyMultipleNestedS3ObjectsToLocalWithParents(t *testing.T) { +// cp s3://bucket/*/*.ext dir/ +func TestCopyMultipleNestedS3ObjectsToLocalWithPartial(t *testing.T) { t.Parallel() bucket := s3BucketFromTestName(t) @@ -547,7 +566,7 @@ func TestCopyMultipleNestedS3ObjectsToLocalWithParents(t *testing.T) { putFile(t, s3client, bucket, filename, content) } - cmd := s5cmd("cp", "--parents", "s3://"+bucket+"/*/*.txt", ".") + cmd := s5cmd("cp", "s3://"+bucket+"/*/*.txt", ".") result := icmd.RunCmd(cmd) result.Assert(t, icmd.Success) @@ -597,10 +616,10 @@ func TestCopyMultipleS3ObjectsToGivenLocalDirectory(t *testing.T) { createBucket(t, s3client, bucket) filesToContent := map[string]string{ - "testfile1.txt": "this is a test file 1", - "readme.md": "this is a readme file", - "filename-with-hypen.gz": "file has hypen in its name", - "another_test_file.txt": "yet another txt file. yatf.", + "testfile1.txt": "this is a test file 1", + "readme.md": "this is a readme file", + "b/filename-with-hypen.gz": "file has hypen in its name", + "a/another_test_file.txt": "yet another txt file. 
yatf.", } for filename, content := range filesToContent { @@ -615,17 +634,24 @@ func TestCopyMultipleS3ObjectsToGivenLocalDirectory(t *testing.T) { assertLines(t, result.Stdout(), map[int]compareFunc{ 0: equals(""), - 1: equals(`cp s3://%v/another_test_file.txt %v/another_test_file.txt`, bucket, dst), - 2: equals(`cp s3://%v/filename-with-hypen.gz %v/filename-with-hypen.gz`, bucket, dst), + 1: equals(`cp s3://%v/a/another_test_file.txt %v/a/another_test_file.txt`, bucket, dst), + 2: equals(`cp s3://%v/b/filename-with-hypen.gz %v/b/filename-with-hypen.gz`, bucket, dst), 3: equals(`cp s3://%v/readme.md %v/readme.md`, bucket, dst), 4: equals(`cp s3://%v/testfile1.txt %v/testfile1.txt`, bucket, dst), }, sortInput(true)) // assert local filesystem - var expectedFiles []fs.PathOp - for filename, content := range filesToContent { - pathop := fs.WithFile(filename, content, fs.WithMode(0644)) - expectedFiles = append(expectedFiles, pathop) + var expectedFiles = []fs.PathOp{ + fs.WithDir( + "a", + fs.WithFile("another_test_file.txt", "yet another txt file. yatf."), + ), + fs.WithDir( + "b", + fs.WithFile("filename-with-hypen.gz", "file has hypen in its name"), + ), + fs.WithFile("readme.md", "this is a readme file"), + fs.WithFile("testfile1.txt", "this is a test file 1"), } expected := fs.Expected(t, fs.WithDir(dst, expectedFiles...)) assert.Assert(t, fs.Equal(cmd.Dir, expected)) @@ -777,9 +803,58 @@ func TestCopyDirToS3(t *testing.T) { assertLines(t, result.Stdout(), map[int]compareFunc{ 0: equals(""), - 1: equals(`cp %s/c/file2.txt %sfile2.txt`, srcpath, dstpath), - 2: equals(`cp %s/file1.txt %sfile1.txt`, srcpath, dstpath), - 3: equals(`cp %s/readme.md %sreadme.md`, srcpath, dstpath), + 1: equals(`cp %v/c/file2.txt %vc/file2.txt`, srcpath, dstpath), + 2: equals(`cp %v/file1.txt %vfile1.txt`, srcpath, dstpath), + 3: equals(`cp %v/readme.md %vreadme.md`, srcpath, dstpath), + }, sortInput(true)) + + // assert local filesystem + expected := fs.Expected(t, folderLayout...) + assert.Assert(t, fs.Equal(workdir.Path(), expected)) + + // assert s3 + assert.Assert(t, ensureS3Object(s3client, bucket, "file1.txt", "this is the first test file")) + assert.Assert(t, ensureS3Object(s3client, bucket, "readme.md", "this is a readme file")) + assert.Assert(t, ensureS3Object(s3client, bucket, "c/file2.txt", "this is the second test file")) +} + +// cp --flatten dir/ s3://bucket/ +func TestFlattenCopyDirToS3(t *testing.T) { + t.Parallel() + + bucket := s3BucketFromTestName(t) + + s3client, s5cmd, cleanup := setup(t) + defer cleanup() + + createBucket(t, s3client, bucket) + + folderLayout := []fs.PathOp{ + fs.WithFile("file1.txt", "this is the first test file"), + fs.WithFile("readme.md", "this is a readme file"), + fs.WithDir( + "c", + fs.WithFile("file2.txt", "this is the second test file"), + ), + } + + workdir := fs.NewDir(t, t.Name(), folderLayout...) + defer workdir.Remove() + srcpath := workdir.Path() + dstpath := fmt.Sprintf("s3://%v/", bucket) + + // this command ('s5cmd cp dir/ s3://bucket/') will run in 'walk' mode, + // which is different than 'glob' mode. 
+ cmd := s5cmd("cp", "--flatten", workdir.Path()+"/", dstpath) + result := icmd.RunCmd(cmd) + + result.Assert(t, icmd.Success) + + assertLines(t, result.Stdout(), map[int]compareFunc{ + 0: equals(""), + 1: equals(`cp %v/c/file2.txt %vfile2.txt`, srcpath, dstpath), + 2: equals(`cp %v/file1.txt %vfile1.txt`, srcpath, dstpath), + 3: equals(`cp %v/readme.md %vreadme.md`, srcpath, dstpath), }, sortInput(true)) // assert local filesystem @@ -804,20 +879,20 @@ func TestCopyMultipleFilesToS3Bucket(t *testing.T) { createBucket(t, s3client, bucket) - filesToContent := map[string]string{ - "testfile1.txt": "this is a test file 1", - "readme.md": "this is a readme file", - "filename-with-hypen.gz": "file has hypen in its name", - "another_test_file.txt": "yet another txt file. yatf.", - } - - var files []fs.PathOp - for filename, content := range filesToContent { - op := fs.WithFile(filename, content) - files = append(files, op) + folderLayout := []fs.PathOp{ + fs.WithFile("testfile1.txt", "this is a test file 1"), + fs.WithFile("readme.md", "this is a readme file"), + fs.WithDir( + "a", + fs.WithFile("another_test_file.txt", "yet another txt file. yatf."), + ), + fs.WithDir( + "b", + fs.WithFile("filename-with-hypen.gz", "file has hypen in its name"), + ), } - workdir := fs.NewDir(t, "somedir", files...) + workdir := fs.NewDir(t, "somedir", folderLayout...) dstpath := fmt.Sprintf("s3://%v/", bucket) srcpath := workdir.Path() defer workdir.Remove() @@ -828,24 +903,32 @@ func TestCopyMultipleFilesToS3Bucket(t *testing.T) { result.Assert(t, icmd.Success) assertLines(t, result.Stdout(), map[int]compareFunc{ 0: equals(""), - 1: equals(`cp %s/another_test_file.txt %sanother_test_file.txt`, srcpath, dstpath), - 2: equals(`cp %s/filename-with-hypen.gz %vfilename-with-hypen.gz`, srcpath, dstpath), - 3: equals(`cp %s/readme.md %vreadme.md`, srcpath, dstpath), - 4: equals(`cp %s/testfile1.txt %vtestfile1.txt`, srcpath, dstpath), + 1: equals(`cp %v/a/another_test_file.txt %va/another_test_file.txt`, srcpath, dstpath), + 2: equals(`cp %v/b/filename-with-hypen.gz %vb/filename-with-hypen.gz`, srcpath, dstpath), + 3: equals(`cp %v/readme.md %vreadme.md`, srcpath, dstpath), + 4: equals(`cp %v/testfile1.txt %vtestfile1.txt`, srcpath, dstpath), }, sortInput(true)) // assert local filesystem - expected := fs.Expected(t, files...) + expected := fs.Expected(t, folderLayout...) assert.Assert(t, fs.Equal(workdir.Path(), expected)) + expectedS3Content := map[string]string{ + "testfile1.txt": "this is a test file 1", + "readme.md": "this is a readme file", + "b/filename-with-hypen.gz": "file has hypen in its name", + "a/another_test_file.txt": "yet another txt file. yatf.", + } + // assert s3 - for filename, content := range filesToContent { + for filename, content := range expectedS3Content { assert.Assert(t, ensureS3Object(s3client, bucket, filename, content)) } } -// cp dir/* s3://bucket/prefix -func TestCopyMultipleFilesToS3WithPrefixWithoutSlash(t *testing.T) { +// cp --flatten dir/* s3://bucket/ +func TestFlattenCopyMultipleFilesToS3Bucket(t *testing.T) { + t.Parallel() bucket := s3BucketFromTestName(t) @@ -855,20 +938,79 @@ func TestCopyMultipleFilesToS3WithPrefixWithoutSlash(t *testing.T) { createBucket(t, s3client, bucket) - filesToContent := map[string]string{ + folderLayout := []fs.PathOp{ + fs.WithFile("testfile1.txt", "this is a test file 1"), + fs.WithFile("readme.md", "this is a readme file"), + fs.WithDir( + "a", + fs.WithFile("another_test_file.txt", "yet another txt file. 
yatf."), + ), + fs.WithDir( + "b", + fs.WithFile("filename-with-hypen.gz", "file has hypen in its name"), + ), + } + + workdir := fs.NewDir(t, "somedir", folderLayout...) + dstpath := fmt.Sprintf("s3://%v/", bucket) + srcpath := workdir.Path() + defer workdir.Remove() + + cmd := s5cmd("cp", "--flatten", srcpath+"/*", dstpath) + result := icmd.RunCmd(cmd) + + result.Assert(t, icmd.Success) + + assertLines(t, result.Stdout(), map[int]compareFunc{ + 0: equals(""), + 1: equals(`cp %v/a/another_test_file.txt %vanother_test_file.txt`, srcpath, dstpath), + 2: equals(`cp %v/b/filename-with-hypen.gz %vfilename-with-hypen.gz`, srcpath, dstpath), + 3: equals(`cp %v/readme.md %vreadme.md`, srcpath, dstpath), + 4: equals(`cp %v/testfile1.txt %vtestfile1.txt`, srcpath, dstpath), + }, sortInput(true)) + + // assert local filesystem + expected := fs.Expected(t, folderLayout...) + assert.Assert(t, fs.Equal(workdir.Path(), expected)) + + expectedS3Content := map[string]string{ "testfile1.txt": "this is a test file 1", "readme.md": "this is a readme file", "filename-with-hypen.gz": "file has hypen in its name", "another_test_file.txt": "yet another txt file. yatf.", } - var files []fs.PathOp - for filename, content := range filesToContent { - op := fs.WithFile(filename, content) - files = append(files, op) + // assert s3 + for filename, content := range expectedS3Content { + assert.Assert(t, ensureS3Object(s3client, bucket, filename, content)) } +} - workdir := fs.NewDir(t, "somedir", files...) +// cp dir/* s3://bucket/prefix (error) +func TestCopyMultipleFilesToS3WithPrefixWithoutSlash(t *testing.T) { + t.Parallel() + + bucket := s3BucketFromTestName(t) + + s3client, s5cmd, cleanup := setup(t) + defer cleanup() + + createBucket(t, s3client, bucket) + + folderLayout := []fs.PathOp{ + fs.WithFile("testfile1.txt", "this is a test file 1"), + fs.WithFile("readme.md", "this is a readme file"), + fs.WithDir( + "a", + fs.WithFile("another_test_file.txt", "yet another txt file. yatf."), + ), + fs.WithDir( + "b", + fs.WithFile("filename-with-hypen.gz", "file has hypen in its name"), + ), + } + + workdir := fs.NewDir(t, "somedir", folderLayout...) defer workdir.Remove() src := fmt.Sprintf("%v/*", workdir.Path()) @@ -885,7 +1027,7 @@ func TestCopyMultipleFilesToS3WithPrefixWithoutSlash(t *testing.T) { }) // assert local filesystem - expected := fs.Expected(t, files...) + expected := fs.Expected(t, folderLayout...) assert.Assert(t, fs.Equal(workdir.Path(), expected)) } @@ -900,20 +1042,82 @@ func TestCopyMultipleFilesToS3WithPrefixWithSlash(t *testing.T) { createBucket(t, s3client, bucket) - filesToContent := map[string]string{ - "testfile1.txt": "this is a test file 1", - "readme.md": "this is a readme file", - "filename-with-hypen.gz": "file has hypen in its name", - "another_test_file.txt": "yet another txt file. yatf.", + folderLayout := []fs.PathOp{ + fs.WithFile("testfile1.txt", "this is a test file 1"), + fs.WithFile("readme.md", "this is a readme file"), + fs.WithDir( + "a", + fs.WithFile("another_test_file.txt", "yet another txt file. yatf."), + ), + fs.WithDir( + "b", + fs.WithFile("filename-with-hypen.gz", "file has hypen in its name"), + ), } - var files []fs.PathOp - for filename, content := range filesToContent { - op := fs.WithFile(filename, content) - files = append(files, op) + workdir := fs.NewDir(t, "somedir", folderLayout...) 
+ defer workdir.Remove() + + srcpath := workdir.Path() + + src := fmt.Sprintf("%v/*", srcpath) + dst := fmt.Sprintf("s3://%v/prefix/", bucket) + + cmd := s5cmd("cp", src, dst) + result := icmd.RunCmd(cmd) + + result.Assert(t, icmd.Success) + + assertLines(t, result.Stdout(), map[int]compareFunc{ + 0: equals(""), + 1: equals(`cp %v/a/another_test_file.txt %va/another_test_file.txt`, srcpath, dst), + 2: equals(`cp %v/b/filename-with-hypen.gz %vb/filename-with-hypen.gz`, srcpath, dst), + 3: equals(`cp %v/readme.md %vreadme.md`, srcpath, dst), + 4: equals(`cp %v/testfile1.txt %vtestfile1.txt`, srcpath, dst), + }, sortInput(true)) + + // assert local filesystem + expected := fs.Expected(t, folderLayout...) + assert.Assert(t, fs.Equal(workdir.Path(), expected)) + + expectedS3Content := map[string]string{ + "prefix/testfile1.txt": "this is a test file 1", + "prefix/readme.md": "this is a readme file", + "prefix/b/filename-with-hypen.gz": "file has hypen in its name", + "prefix/a/another_test_file.txt": "yet another txt file. yatf.", } - workdir := fs.NewDir(t, "somedir", files...) + // assert s3 + for key, content := range expectedS3Content { + assert.Assert(t, ensureS3Object(s3client, bucket, key, content)) + } +} + +// cp --flatten dir/* s3://bucket/prefix/ +func TestFlattenCopyMultipleFilesToS3WithPrefixWithSlash(t *testing.T) { + t.Parallel() + + bucket := s3BucketFromTestName(t) + + s3client, s5cmd, cleanup := setup(t) + defer cleanup() + + createBucket(t, s3client, bucket) + + folderLayout := []fs.PathOp{ + fs.WithFile("testfile1.txt", "this is a test file 1"), + fs.WithFile("readme.md", "this is a readme file"), + fs.WithDir( + "a", + fs.WithFile("another_test_file.txt", "yet another txt file. yatf."), + ), + fs.WithDir( + "b", + fs.WithFile("filename-with-hypen.gz", "file has hypen in its name"), + ), + } + + workdir := fs.NewDir(t, "somedir", folderLayout...) defer workdir.Remove() srcpath := workdir.Path() @@ -921,27 +1125,33 @@ func TestCopyMultipleFilesToS3WithPrefixWithSlash(t *testing.T) { src := fmt.Sprintf("%v/*", srcpath) dst := fmt.Sprintf("s3://%v/prefix/", bucket) - cmd := s5cmd("cp", src, dst) + cmd := s5cmd("cp", "--flatten", src, dst) result := icmd.RunCmd(cmd) result.Assert(t, icmd.Success) assertLines(t, result.Stdout(), map[int]compareFunc{ 0: equals(""), - 1: equals(`cp %v/another_test_file.txt %vanother_test_file.txt`, srcpath, dst), - 2: equals(`cp %v/filename-with-hypen.gz %vfilename-with-hypen.gz`, srcpath, dst), + 1: equals(`cp %v/a/another_test_file.txt %vanother_test_file.txt`, srcpath, dst), + 2: equals(`cp %v/b/filename-with-hypen.gz %vfilename-with-hypen.gz`, srcpath, dst), 3: equals(`cp %v/readme.md %vreadme.md`, srcpath, dst), 4: equals(`cp %v/testfile1.txt %vtestfile1.txt`, srcpath, dst), }, sortInput(true)) // assert local filesystem - expected := fs.Expected(t, files...) + expected := fs.Expected(t, folderLayout...) assert.Assert(t, fs.Equal(workdir.Path(), expected)) + expectedS3Content := map[string]string{ + "prefix/testfile1.txt": "this is a test file 1", + "prefix/readme.md": "this is a readme file", + "prefix/filename-with-hypen.gz": "file has hypen in its name", + "prefix/another_test_file.txt": "yet another txt file. 
yatf.", + } + // assert s3 - for filename, content := range filesToContent { - objpath := "prefix/" + filename - assert.Assert(t, ensureS3Object(s3client, bucket, objpath, content)) + for key, content := range expectedS3Content { + assert.Assert(t, ensureS3Object(s3client, bucket, key, content)) } } @@ -956,46 +1166,115 @@ func TestCopyLocalDirectoryToS3WithPrefixWithSlash(t *testing.T) { createBucket(t, s3client, bucket) - filesToContent := map[string]string{ - "testfile1.txt": "this is a test file 1", - "readme.md": "this is a readme file", - "filename-with-hypen.gz": "file has hypen in its name", - "another_test_file.txt": "yet another txt file. yatf.", + folderLayout := []fs.PathOp{ + fs.WithFile("testfile1.txt", "this is a test file 1"), + fs.WithFile("readme.md", "this is a readme file"), + fs.WithDir( + "a", + fs.WithFile("another_test_file.txt", "yet another txt file. yatf."), + ), + fs.WithDir( + "b", + fs.WithFile("filename-with-hypen.gz", "file has hypen in its name"), + ), } - var files []fs.PathOp - for filename, content := range filesToContent { - op := fs.WithFile(filename, content) - files = append(files, op) + workdir := fs.NewDir(t, "somedir", folderLayout...) + defer workdir.Remove() + + src := fmt.Sprintf("%v/", workdir.Path()) + dst := fmt.Sprintf("s3://%v/prefix/", bucket) + + cmd := s5cmd("cp", src, dst) + result := icmd.RunCmd(cmd) + + result.Assert(t, icmd.Success) + + assertLines(t, result.Stdout(), map[int]compareFunc{ + 0: equals(""), + 1: equals(`cp %va/another_test_file.txt %va/another_test_file.txt`, src, dst), + 2: equals(`cp %vb/filename-with-hypen.gz %vb/filename-with-hypen.gz`, src, dst), + 3: equals(`cp %vreadme.md %vreadme.md`, src, dst), + 4: equals(`cp %vtestfile1.txt %vtestfile1.txt`, src, dst), + }, sortInput(true)) + + // assert local filesystem + expected := fs.Expected(t, folderLayout...) + assert.Assert(t, fs.Equal(workdir.Path(), expected)) + + expectedS3Content := map[string]string{ + "prefix/testfile1.txt": "this is a test file 1", + "prefix/readme.md": "this is a readme file", + "prefix/b/filename-with-hypen.gz": "file has hypen in its name", + "prefix/a/another_test_file.txt": "yet another txt file. yatf.", } - workdir := fs.NewDir(t, "somedir", files...) + // assert s3 + for key, content := range expectedS3Content { + assert.Assert(t, ensureS3Object(s3client, bucket, key, content)) + } +} + +// cp --flatten dir/ s3://bucket/prefix/ +func TestFlattenCopyLocalDirectoryToS3WithPrefixWithSlash(t *testing.T) { + t.Parallel() + + bucket := s3BucketFromTestName(t) + + s3client, s5cmd, cleanup := setup(t) + defer cleanup() + + createBucket(t, s3client, bucket) + + folderLayout := []fs.PathOp{ + fs.WithFile("testfile1.txt", "this is a test file 1"), + fs.WithFile("readme.md", "this is a readme file"), + fs.WithDir( + "a", + fs.WithFile("another_test_file.txt", "yet another txt file. yatf."), + ), + fs.WithDir( + "b", + fs.WithDir( + "c", + fs.WithFile("filename-with-hypen.gz", "file has hypen in its name"), + ), + ), + } + + workdir := fs.NewDir(t, "somedir", folderLayout...) 
defer workdir.Remove() src := fmt.Sprintf("%v/", workdir.Path()) dst := fmt.Sprintf("s3://%v/prefix/", bucket) - cmd := s5cmd("cp", src, dst) + cmd := s5cmd("cp", "--flatten", src, dst) result := icmd.RunCmd(cmd) result.Assert(t, icmd.Success) assertLines(t, result.Stdout(), map[int]compareFunc{ 0: equals(""), - 1: equals(`cp %vanother_test_file.txt %vanother_test_file.txt`, src, dst), - 2: equals(`cp %vfilename-with-hypen.gz %vfilename-with-hypen.gz`, src, dst), + 1: equals(`cp %va/another_test_file.txt %vanother_test_file.txt`, src, dst), + 2: equals(`cp %vb/c/filename-with-hypen.gz %vfilename-with-hypen.gz`, src, dst), 3: equals(`cp %vreadme.md %vreadme.md`, src, dst), 4: equals(`cp %vtestfile1.txt %vtestfile1.txt`, src, dst), }, sortInput(true)) // assert local filesystem - expected := fs.Expected(t, files...) + expected := fs.Expected(t, folderLayout...) assert.Assert(t, fs.Equal(workdir.Path(), expected)) + expectedS3Content := map[string]string{ + "prefix/testfile1.txt": "this is a test file 1", + "prefix/readme.md": "this is a readme file", + "prefix/filename-with-hypen.gz": "file has hypen in its name", + "prefix/another_test_file.txt": "yet another txt file. yatf.", + } + // assert s3 - for filename, content := range filesToContent { - objpath := "prefix/" + filename - assert.Assert(t, ensureS3Object(s3client, bucket, objpath, content)) + for key, content := range expectedS3Content { + assert.Assert(t, ensureS3Object(s3client, bucket, key, content)) } } @@ -1072,7 +1351,7 @@ func TestCopySingleS3ObjectToS3(t *testing.T) { result.Assert(t, icmd.Success) assertLines(t, result.Stdout(), map[int]compareFunc{ - 0: suffix(`cp %v %v`, src, dst), + 0: equals(`cp %v %v`, src, dst), 1: equals(""), }) @@ -1176,6 +1455,46 @@ func TestCopySingleS3ObjectIntoAnotherBucket(t *testing.T) { assert.Assert(t, ensureS3Object(s3client, dstbucket, filename, content)) } +// cp --flatten s3://bucket/object s3://bucket2/ +func TestFlattenCopySingleS3ObjectIntoAnotherBucket(t *testing.T) { + t.Parallel() + + srcbucket := s3BucketFromTestName(t) + dstbucket := "copy-" + s3BucketFromTestName(t) + + s3client, s5cmd, cleanup := setup(t) + defer cleanup() + + createBucket(t, s3client, srcbucket) + createBucket(t, s3client, dstbucket) + + const ( + filename = "testfile1.txt" + content = "this is a file content" + ) + + putFile(t, s3client, srcbucket, filename, content) + + src := fmt.Sprintf("s3://%v/%v", srcbucket, filename) + dst := fmt.Sprintf("s3://%v/", dstbucket) + + cmd := s5cmd("cp", "--flatten", src, dst) + result := icmd.RunCmd(cmd) + + result.Assert(t, icmd.Success) + + assertLines(t, result.Stdout(), map[int]compareFunc{ + 0: equals(`cp %v %v%v`, src, dst, filename), + 1: equals(""), + }) + + // assert s3 source object + assert.Assert(t, ensureS3Object(s3client, srcbucket, filename, content)) + + // assert s3 destination object + assert.Assert(t, ensureS3Object(s3client, dstbucket, filename, content)) +} + // cp s3://bucket/object s3://bucket2/object func TestCopySingleS3ObjectIntoAnotherBucketWithObjName(t *testing.T) { t.Parallel() @@ -1268,10 +1587,10 @@ func TestCopyMultipleS3ObjectsToS3WithPrefix(t *testing.T) { createBucket(t, s3client, bucket) filesToContent := map[string]string{ - "testfile1.txt": "this is a test file 1", - "readme.md": "this is a readme file", - "filename-with-hypen.gz": "file has hypen in its name", - "another_test_file.txt": "yet another txt file. 
yatf.", + "testfile1.txt": "this is a test file 1", + "readme.md": "this is a readme file", + "b/filename-with-hypen.gz": "file has hypen in its name", + "a/another_test_file.txt": "yet another txt file. yatf.", } for filename, content := range filesToContent { @@ -1288,8 +1607,8 @@ func TestCopyMultipleS3ObjectsToS3WithPrefix(t *testing.T) { assertLines(t, result.Stdout(), map[int]compareFunc{ 0: equals(""), - 1: equals(`cp s3://%v/another_test_file.txt %vanother_test_file.txt`, bucket, dst), - 2: equals(`cp s3://%v/filename-with-hypen.gz %vfilename-with-hypen.gz`, bucket, dst), + 1: equals(`cp s3://%v/a/another_test_file.txt %va/another_test_file.txt`, bucket, dst), + 2: equals(`cp s3://%v/b/filename-with-hypen.gz %vb/filename-with-hypen.gz`, bucket, dst), 3: equals(`cp s3://%v/readme.md %vreadme.md`, bucket, dst), 4: equals(`cp s3://%v/testfile1.txt %vtestfile1.txt`, bucket, dst), }, sortInput(true)) @@ -1305,6 +1624,62 @@ func TestCopyMultipleS3ObjectsToS3WithPrefix(t *testing.T) { } } +// cp --flatten s3://bucket/* s3://bucket/prefix/ +func TestFlattenCopyMultipleS3ObjectsToS3WithPrefix(t *testing.T) { + t.Parallel() + + bucket := s3BucketFromTestName(t) + + s3client, s5cmd, cleanup := setup(t) + defer cleanup() + + createBucket(t, s3client, bucket) + + filesToContent := map[string]string{ + "testfile1.txt": "this is a test file 1", + "readme.md": "this is a readme file", + "b/filename-with-hypen.gz": "file has hypen in its name", + "a/another_test_file.txt": "yet another txt file. yatf.", + } + + for filename, content := range filesToContent { + putFile(t, s3client, bucket, filename, content) + } + + src := fmt.Sprintf("s3://%v/*", bucket) + dst := fmt.Sprintf("s3://%v/dst/", bucket) + + cmd := s5cmd("cp", "--flatten", src, dst) + result := icmd.RunCmd(cmd) + + result.Assert(t, icmd.Success) + + assertLines(t, result.Stdout(), map[int]compareFunc{ + 0: equals(""), + 1: equals(`cp s3://%v/a/another_test_file.txt %vanother_test_file.txt`, bucket, dst), + 2: equals(`cp s3://%v/b/filename-with-hypen.gz %vfilename-with-hypen.gz`, bucket, dst), + 3: equals(`cp s3://%v/readme.md %vreadme.md`, bucket, dst), + 4: equals(`cp s3://%v/testfile1.txt %vtestfile1.txt`, bucket, dst), + }, sortInput(true)) + + // assert s3 source objects + for filename, content := range filesToContent { + assert.Assert(t, ensureS3Object(s3client, bucket, filename, content)) + } + + dstContent := map[string]string{ + "dst/testfile1.txt": "this is a test file 1", + "dst/readme.md": "this is a readme file", + "dst/filename-with-hypen.gz": "file has hypen in its name", + "dst/another_test_file.txt": "yet another txt file. yatf.", + } + + // assert s3 destination objects + for key, content := range dstContent { + assert.Assert(t, ensureS3Object(s3client, bucket, key, content)) + } +} + // cp s3://bucket/* s3://bucket/prefix func TestCopyMultipleS3ObjectsToS3WithPrefixWithoutSlash(t *testing.T) { t.Parallel() @@ -1317,10 +1692,10 @@ func TestCopyMultipleS3ObjectsToS3WithPrefixWithoutSlash(t *testing.T) { createBucket(t, s3client, bucket) filesToContent := map[string]string{ - "testfile1.txt": "this is a test file 1", - "readme.md": "this is a readme file", - "filename-with-hypen.gz": "file has hypen in its name", - "another_test_file.txt": "yet another txt file. yatf.", + "testfile1.txt": "this is a test file 1", + "readme.md": "this is a readme file", + "b/filename-with-hypen.gz": "file has hypen in its name", + "a/another_test_file.txt": "yet another txt file. 
yatf.", } for filename, content := range filesToContent { @@ -1416,7 +1791,7 @@ func TestCopyMultipleS3ObjectsToS3JSON(t *testing.T) { } } -// cp -u -s --parents s3://bucket/prefix/* s3://bucket/prefix2/ +// cp -u -s s3://bucket/prefix/* s3://bucket/prefix2/ func TestCopyMultipleS3ObjectsToS3_Issue70(t *testing.T) { t.Parallel() @@ -1439,7 +1814,7 @@ func TestCopyMultipleS3ObjectsToS3_Issue70(t *testing.T) { src := fmt.Sprintf("s3://%v/config/.local/*", bucket) dst := fmt.Sprintf("s3://%v/.local/", bucket) - cmd := s5cmd("cp", "-u", "-s", "--parents", src, dst) + cmd := s5cmd("cp", "-u", "-s", src, dst) result := icmd.RunCmd(cmd) result.Assert(t, icmd.Success) @@ -1665,7 +2040,7 @@ func TestCopyS3ToLocalWithSameFilenameDontOverrideIfS3ObjectIsOlder(t *testing.T assert.Assert(t, fs.Equal(workdir.Path(), expected)) } -// cp -u -s --parents s3://bucket/prefix/* dir/ +// cp -u -s s3://bucket/prefix/* dir/ func TestCopyS3ToLocal_Issue70(t *testing.T) { t.Parallel() @@ -1691,7 +2066,7 @@ func TestCopyS3ToLocal_Issue70(t *testing.T) { srcpath := fmt.Sprintf("s3://%v/config/.local/*", bucket) dstpath := filepath.Join(workdir.Path(), ".local") - cmd := s5cmd("cp", "-u", "-s", "--parents", srcpath, dstpath) + cmd := s5cmd("cp", "-u", "-s", srcpath, dstpath) result := icmd.RunCmd(cmd, withWorkingDir(workdir)) @@ -2063,7 +2438,7 @@ func TestCopyLocalFileToS3WithPrefix(t *testing.T) { assert.Assert(t, fs.Equal(workdir.Path(), expected)) // assert s3 object - assert.Assert(t, ensureS3Object(s3client, bucket, fmt.Sprintf("s5cmdtest/%s", filename), content)) + assert.Assert(t, ensureS3Object(s3client, bucket, fmt.Sprintf("s5cmdtest/%v", filename), content)) } // cp file s3://bucket @@ -2105,8 +2480,8 @@ func TestMultipleLocalFileToS3Bucket(t *testing.T) { assert.Assert(t, ensureS3Object(s3client, bucket, filename, content)) } -// cp --parents * s3://bucket/prefix/ -func TestCopyMultipleLocalNestedFilesToS3WithParents(t *testing.T) { +// cp * s3://bucket/prefix/ +func TestCopyMultipleLocalNestedFilesToS3(t *testing.T) { t.Parallel() s3client, s5cmd, cleanup := setup(t) @@ -2124,7 +2499,7 @@ func TestCopyMultipleLocalNestedFilesToS3WithParents(t *testing.T) { //  └─c //   └─file2.txt // - // after `s5cmd cp --parents * s3://bucket/prefix/`, expect: + // after `s5cmd cp * s3://bucket/prefix/`, expect: // // prefix // ├─a @@ -2154,7 +2529,7 @@ func TestCopyMultipleLocalNestedFilesToS3WithParents(t *testing.T) { dst := fmt.Sprintf("s3://%v/prefix/", bucket) - cmd := s5cmd("cp", "--parents", "*", dst) + cmd := s5cmd("cp", "*", dst) result := icmd.RunCmd(cmd, withWorkingDir(workdir)) result.Assert(t, icmd.Success) diff --git a/e2e/mv_test.go b/e2e/mv_test.go index 1ccb5527f..b2ab4542c 100644 --- a/e2e/mv_test.go +++ b/e2e/mv_test.go @@ -10,6 +10,7 @@ import ( "gotest.tools/v3/icmd" ) +// mv s3://bucket/key dir/ func TestMoveSingleS3ObjectToLocal(t *testing.T) { t.Parallel() @@ -46,7 +47,8 @@ func TestMoveSingleS3ObjectToLocal(t *testing.T) { assertError(t, err, errS3NoSuchKey) } -func TestMoveMultipleFlatS3ObjectsToLocal(t *testing.T) { +// mv s3://bucket/key dir/ +func TestMoveMultipleS3ObjectsToLocal(t *testing.T) { t.Parallel() bucket := s3BucketFromTestName(t) @@ -96,6 +98,7 @@ func TestMoveMultipleFlatS3ObjectsToLocal(t *testing.T) { } } +// mv file s3://bucket func TestMoveSingleFileToS3(t *testing.T) { t.Parallel() @@ -133,6 +136,7 @@ func TestMoveSingleFileToS3(t *testing.T) { assert.Assert(t, ensureS3Object(s3client, bucket, filename, content)) } +// mv dir/* s3://bucket func TestMoveMultipleFilesToS3(t 
*testing.T) { t.Parallel() @@ -184,6 +188,7 @@ func TestMoveMultipleFilesToS3(t *testing.T) { } } +// mv s3://bucket/object s3://bucket2/object func TestMoveSingleS3ObjectToS3(t *testing.T) { t.Parallel() @@ -222,6 +227,7 @@ func TestMoveSingleS3ObjectToS3(t *testing.T) { assert.Assert(t, ensureS3Object(s3client, bucket, "dst/"+filename, content)) } +// mv s3://bucket/object s3://bucket2/object func TestMoveSingleS3ObjectIntoAnotherBucket(t *testing.T) { t.Parallel() @@ -262,6 +268,7 @@ func TestMoveSingleS3ObjectIntoAnotherBucket(t *testing.T) { assert.Assert(t, ensureS3Object(s3client, dstbucket, filename, content)) } +// mv s3://bucket/* s3://bucket2/prefix/ func TestMoveMultipleS3ObjectsToS3(t *testing.T) { t.Parallel()
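The same rule is what the new e2e tests assert for uploads. Using the layout from `TestCopyDirToS3` and `TestFlattenCopyDirToS3` above (paths are taken from those tests):

    s5cmd cp dir/ s3://bucket/             # stores file1.txt, readme.md, c/file2.txt
    s5cmd cp --flatten dir/ s3://bucket/   # stores file1.txt, readme.md, file2.txt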