command/mv: fix options usage #338

Merged: 1 commit, Aug 12, 2021
82 changes: 44 additions & 38 deletions command/cp.go
@@ -72,28 +72,28 @@ Examples:
	> s5cmd {{.HelpName}} -n -s -u s3://bucket/source-prefix/* s3://bucket/target-prefix/

12. Perform KMS Server Side Encryption of the object(s) at the destination
	> s5cmd {{.HelpName}} --sse aws:kms s3://bucket/object s3://target-bucket/prefix/object

13. Perform KMS-SSE of the object(s) at the destination using customer managed Customer Master Key (CMK) key id
	> s5cmd {{.HelpName}} --sse aws:kms --sse-kms-key-id <your-kms-key-id> s3://bucket/object s3://target-bucket/prefix/object

14. Force transfer of GLACIER objects with a prefix whether they are restored or not
	> s5cmd {{.HelpName}} --force-glacier-transfer s3://bucket/prefix/* target-directory/

15. Upload a file to S3 bucket with public read s3 acl
	> s5cmd {{.HelpName}} --acl "public-read" myfile.gz s3://bucket/

16. Upload a file to S3 bucket with expires header
	> s5cmd {{.HelpName}} --expires "2024-10-01T20:30:00Z" myfile.gz s3://bucket/

17. Upload a file to S3 bucket with cache-control header
	> s5cmd {{.HelpName}} --cache-control "public, max-age=345600" myfile.gz s3://bucket/

-18. Copy all files to S3 bucket but exclude the ones with txt and gz extension
-	> s5cmd cp --exclude "*.txt" --exclude "*.gz" dir/ s3://bucket
+18. Copy all files to S3 bucket but exclude the ones with txt and gz extension
+	> s5cmd {{.HelpName}} --exclude "*.txt" --exclude "*.gz" dir/ s3://bucket

19. Copy all files from S3 bucket to another S3 bucket but exclude the ones starts with log
-	> s5cmd cp --exclude "log*" s3://bucket/* s3://destbucket
+	> s5cmd {{.HelpName}} --exclude "log*" s3://bucket/* s3://destbucket
`

func NewCopyCommandFlags() []cli.Flag {
@@ -198,35 +198,8 @@ func NewCopyCommand() *cli.Command {
		Action: func(c *cli.Context) (err error) {
			defer stat.Collect(c.Command.FullName(), &err)()

-			return Copy{
-				src:          c.Args().Get(0),
-				dst:          c.Args().Get(1),
-				op:           c.Command.Name,
-				fullCommand:  givenCommand(c),
-				deleteSource: false, // don't delete source
-				// flags
-				noClobber:            c.Bool("no-clobber"),
-				ifSizeDiffer:         c.Bool("if-size-differ"),
-				ifSourceNewer:        c.Bool("if-source-newer"),
-				flatten:              c.Bool("flatten"),
-				followSymlinks:       !c.Bool("no-follow-symlinks"),
-				storageClass:         storage.StorageClass(c.String("storage-class")),
-				concurrency:          c.Int("concurrency"),
-				partSize:             c.Int64("part-size") * megabytes,
-				encryptionMethod:     c.String("sse"),
-				encryptionKeyID:      c.String("sse-kms-key-id"),
-				acl:                  c.String("acl"),
-				forceGlacierTransfer: c.Bool("force-glacier-transfer"),
-				exclude:              c.StringSlice("exclude"),
-				raw:                  c.Bool("raw"),
-				cacheControl:         c.String("cache-control"),
-				expires:              c.String("expires"),
-				// region settings
-				srcRegion: c.String("source-region"),
-				dstRegion: c.String("destination-region"),
-
-				storageOpts: NewStorageOpts(c),
-			}.Run(c.Context)
+			// don't delete source
+			return NewCopy(c, false).Run(c.Context)
		},
	}
}
@@ -266,6 +239,39 @@ type Copy struct {
	storageOpts storage.Options
}

+// NewCopy creates Copy from cli.Context.
+func NewCopy(c *cli.Context, deleteSource bool) Copy {
+	return Copy{
+		src:          c.Args().Get(0),
+		dst:          c.Args().Get(1),
+		op:           c.Command.Name,
+		fullCommand:  givenCommand(c),
+		deleteSource: deleteSource,
+		// flags
+		noClobber:            c.Bool("no-clobber"),
+		ifSizeDiffer:         c.Bool("if-size-differ"),
+		ifSourceNewer:        c.Bool("if-source-newer"),
+		flatten:              c.Bool("flatten"),
+		followSymlinks:       !c.Bool("no-follow-symlinks"),
+		storageClass:         storage.StorageClass(c.String("storage-class")),
+		concurrency:          c.Int("concurrency"),
+		partSize:             c.Int64("part-size") * megabytes,
+		encryptionMethod:     c.String("sse"),
+		encryptionKeyID:      c.String("sse-kms-key-id"),
+		acl:                  c.String("acl"),
+		forceGlacierTransfer: c.Bool("force-glacier-transfer"),
+		exclude:              c.StringSlice("exclude"),
+		raw:                  c.Bool("raw"),
+		cacheControl:         c.String("cache-control"),
+		expires:              c.String("expires"),
+		// region settings
+		srcRegion: c.String("source-region"),
+		dstRegion: c.String("destination-region"),
+
+		storageOpts: NewStorageOpts(c),
+	}
+}

const fdlimitWarning = `
WARNING: s5cmd is hitting the max open file limit allowed by your OS. Either
increase the open file limit or try to decrease the number of workers with
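The net effect of the cp.go changes: option parsing that previously lived inline in NewCopyCommand's Action is extracted into NewCopy, so cp and mv share a single constructor and cannot drift apart again. A condensed sketch of the two call sites after this change (the defer/stat bookkeeping is elided):

	// cp keeps the source object after the transfer.
	return NewCopy(c, false).Run(c.Context)

	// mv deletes the source once the copy succeeds.
	return NewCopy(c, true).Run(c.Context)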
32 changes: 8 additions & 24 deletions command/mv.go
@@ -2,7 +2,6 @@ package command

import (
	"github.com/peak/s5cmd/log/stat"
-	"github.com/peak/s5cmd/storage"

	"github.com/urfave/cli/v2"
)
@@ -31,6 +30,12 @@ Examples:

5. Move a directory to S3 bucket recursively
	> s5cmd {{.HelpName}} dir/ s3://bucket/
+
+6. Move all files to S3 bucket but exclude the ones with txt and gz extension
+	> s5cmd {{.HelpName}} --exclude "*.txt" --exclude "*.gz" dir/ s3://bucket
+
+7. Move all files from S3 bucket to another S3 bucket but exclude the ones starts with log
+	> s5cmd {{.HelpName}} --exclude "log*" s3://bucket/* s3://destbucket
`

func NewMoveCommand() *cli.Command {
@@ -46,29 +51,8 @@ func NewMoveCommand() *cli.Command {
		Action: func(c *cli.Context) (err error) {
			defer stat.Collect(c.Command.FullName(), &err)()

-			copyCommand := Copy{
-				src:          c.Args().Get(0),
-				dst:          c.Args().Get(1),
-				op:           c.Command.Name,
-				fullCommand:  givenCommand(c),
-				deleteSource: true, // delete source
-				// flags
-				noClobber:        c.Bool("no-clobber"),
-				ifSizeDiffer:     c.Bool("if-size-differ"),
-				ifSourceNewer:    c.Bool("if-source-newer"),
-				flatten:          c.Bool("flatten"),
-				followSymlinks:   !c.Bool("no-follow-symlinks"),
-				storageClass:     storage.StorageClass(c.String("storage-class")),
-				encryptionMethod: c.String("sse"),
-				encryptionKeyID:  c.String("sse-kms-key-id"),
-				acl:              c.String("acl"),
-				cacheControl:     c.String("cache-control"),
-				expires:          c.String("expires"),
-
-				storageOpts: NewStorageOpts(c),
-			}
-
-			return copyCommand.Run(c.Context)
+			// delete source
+			return NewCopy(c, true).Run(c.Context)
		},
	}
}
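This hunk is the actual fix: the inline literal removed above never set concurrency, partSize, forceGlacierTransfer, exclude, raw, srcRegion, or dstRegion, so mv parsed those flags but silently ignored them. Routing mv through NewCopy(c, true) gives it the same option handling as cp. A usage sketch, assuming mv registers the same flag set as cp (bucket names are placeholders):

	> s5cmd mv --concurrency 10 --exclude "log*" s3://bucket/* s3://destbucket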
65 changes: 65 additions & 0 deletions e2e/mv_test.go
@@ -3,6 +3,7 @@ package e2e

import (
	"fmt"
	"path/filepath"
+	"runtime"
	"testing"

	"gotest.tools/v3/assert"
@@ -360,3 +361,67 @@ func TestMoveMultipleS3ObjectsToS3DryRun(t *testing.T) {
		assertError(t, err, errS3NoSuchKey)
	}
}
+
+// mv --raw file s3://bucket/
+func TestMoveLocalObjectToS3WithRawFlag(t *testing.T) {
+	if runtime.GOOS == "windows" {
+		t.Skip()
+	}
+
+	t.Parallel()
+
+	bucket := s3BucketFromTestName(t)
+
+	s3client, s5cmd, cleanup := setup(t)
+	defer cleanup()
+
+	createBucket(t, s3client, bucket)
+
+	objectsToMove := []fs.PathOp{
+		fs.WithFile("a*.txt", "content"),
+	}
+
+	otherObjects := []fs.PathOp{
+		fs.WithDir(
+			"a*b",
+			fs.WithFile("file.txt", "content"),
+		),
+
+		fs.WithFile("abc.txt", "content"),
+	}
+
+	folderLayout := append(objectsToMove, otherObjects...)
+
+	workdir := fs.NewDir(t, t.Name(), folderLayout...)
+	defer workdir.Remove()
+
+	srcpath := filepath.ToSlash(workdir.Join("a*.txt"))
+	dstpath := fmt.Sprintf("s3://%v", bucket)
+
+	cmd := s5cmd("mv", "--raw", srcpath, dstpath)
+	result := icmd.RunCmd(cmd)
+
+	result.Assert(t, icmd.Success)
+
+	assertLines(t, result.Stdout(), map[int]compareFunc{
+		0: equals("mv %v %v/a*.txt", srcpath, dstpath),
+	}, sortInput(true))
+
+	expectedObjects := []string{"a*.txt"}
+	for _, obj := range expectedObjects {
+		err := ensureS3Object(s3client, bucket, obj, "content")
+		if err != nil {
+			t.Fatalf("Object %s is not in S3\n", obj)
+		}
+	}
+
+	nonExpectedObjects := []string{"a*b/file.txt", "abc.txt"}
+	for _, obj := range nonExpectedObjects {
+		err := ensureS3Object(s3client, bucket, obj, "content")
+		assertError(t, err, errS3NoSuchKey)
+	}
+
+	// assert local filesystem
+	expected := fs.Expected(t, otherObjects...)
+	assert.Assert(t, fs.Equal(workdir.Path(), expected))
+}
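The new test pins down --raw semantics for mv: the argument a*.txt is treated as a literal object key rather than a wildcard, so abc.txt and a*b/file.txt stay on disk and only the literal file is uploaded and removed locally. A command-line sketch mirroring the test (single quotes keep the shell itself from expanding the glob):

	> s5cmd mv --raw 'a*.txt' s3://bucket/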
}