mirror of https://github.com/minio/mc.git synced 2025-11-10 13:42:32 +03:00

Pass down encrypt-key handling across command validators (#2483)

Currently, even if a command supports encrypted objects,
syntax verification can fail on them in certain cases,
because the syntax validation of sources/targets does not
use the parsed encrypt-key pairs. This PR fixes that
behavior.
Harshavardhana
2018-07-12 11:13:51 -07:00
committed by kannappanr
parent c57a2dc4c2
commit 70dcf20d74
19 changed files with 138 additions and 122 deletions
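
Below is a minimal, self-contained sketch of the pattern this commit applies across the commands: parse the encrypt-key pairs once per command (from MC_ENCRYPT_KEY or the --encrypt-key flag) and pass the resulting key DB into the syntax validators, so stat calls on encrypted sources/targets no longer fail during validation. The types and helpers here (prefixSSEPair, getEncKeys, url2Stat, checkCatSyntax) are simplified stand-ins, not the actual mc implementation.

package main

import (
	"errors"
	"fmt"
	"os"
	"strings"
)

// prefixSSEPair associates an object prefix with its SSE-C key (simplified stand-in).
type prefixSSEPair struct {
	prefix string
	key    string
}

// getEncKeys reads MC_ENCRYPT_KEY (or an --encrypt-key style value) and
// returns a map of alias -> prefix/key pairs, mirroring the helper added
// in this commit.
func getEncKeys(flagValue string) (map[string][]prefixSSEPair, error) {
	sseKeys := os.Getenv("MC_ENCRYPT_KEY")
	if flagValue != "" {
		sseKeys = flagValue
	}
	db := make(map[string][]prefixSSEPair)
	if sseKeys == "" {
		return db, nil
	}
	for _, pair := range strings.Split(sseKeys, ",") {
		parts := strings.SplitN(pair, "=", 2)
		if len(parts) != 2 || len(parts[1]) != 32 {
			return nil, errors.New("invalid encrypt-key pair: " + pair)
		}
		alias := strings.SplitN(parts[0], "/", 2)[0]
		db[alias] = append(db[alias], prefixSSEPair{prefix: parts[0], key: parts[1]})
	}
	return db, nil
}

// checkCatSyntax stands in for a command validator: it now receives the
// parsed key DB so stat calls on encrypted objects can succeed during
// syntax validation instead of failing before the keys are known.
func checkCatSyntax(urls []string, encKeyDB map[string][]prefixSSEPair) error {
	for _, url := range urls {
		if _, err := url2Stat(url, false, encKeyDB); err != nil {
			return fmt.Errorf("unable to validate %q: %w", url, err)
		}
	}
	return nil
}

// url2Stat stands in for the real client stat call; here it only reports
// whether a key covering the URL prefix was supplied.
func url2Stat(url string, fetchMeta bool, encKeyDB map[string][]prefixSSEPair) (string, error) {
	alias := strings.SplitN(url, "/", 2)[0]
	for _, p := range encKeyDB[alias] {
		if strings.HasPrefix(url, p.prefix) {
			return "encrypted object (key found)", nil
		}
	}
	return "plain object", nil
}

func main() {
	// Parse keys once per command, then hand them to the validator.
	encKeyDB, err := getEncKeys("myminio/docs/=32byteslongsecretkeymustbegiven1")
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	if err := checkCatSyntax([]string{"myminio/docs/report.pdf"}, encKeyDB); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	fmt.Println("syntax OK")
}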

View File

@@ -152,7 +152,7 @@ func catURL(sourceURL string, encKeyDB map[string][]prefixSSEPair) *probe.Error
// downloaded object is equal to the original one. FS files
// are ignored since some of them have zero size though they
// have contents like files under /proc.
client, content, err := url2Stat(sourceURL)
client, content, err := url2Stat(sourceURL, false, encKeyDB)
if err == nil && client.GetURL().Type == objectStorage {
size = content.Size
}
@@ -210,6 +210,9 @@ func catOut(r io.Reader, size int64) *probe.Error {
// mainCat is the main entry point for cat command.
func mainCat(ctx *cli.Context) error {
// Parse encryption keys per command.
encKeyDB, err := getEncKeys(ctx)
fatalIf(err, "Unable to parse encryption keys.")
// check 'cat' cli arguments.
checkCatSyntax(ctx)
@@ -237,16 +240,11 @@ func mainCat(ctx *cli.Context) error {
}
}
}
sseKeys := os.Getenv("MC_ENCRYPT_KEY")
if key := ctx.String("encrypt-key"); key != "" {
sseKeys = key
}
encKeyDB, err := parseAndValidateEncryptionKeys(sseKeys)
fatalIf(err, "Unable to parse encryption keys.")
// Convert arguments to URLs: expand alias, fix format.
for _, url := range args {
fatalIf(catURL(url, encKeyDB).Trace(url), "Unable to read from `"+url+"`.")
}
return nil
}

View File

@@ -171,12 +171,7 @@ func urlJoinPath(url1, url2 string) string {
}
// url2Stat returns stat info for URL.
func url2Stat(urlStr string) (client Client, content *clientContent, err *probe.Error) {
return url2StatWithMetadata(urlStr, false, nil)
}
// url2Stat returns stat info for URL.
func url2StatWithMetadata(urlStr string, isFetchMeta bool, encKeyDB map[string][]prefixSSEPair) (client Client, content *clientContent, err *probe.Error) {
func url2Stat(urlStr string, isFetchMeta bool, encKeyDB map[string][]prefixSSEPair) (client Client, content *clientContent, err *probe.Error) {
client, err = newClient(urlStr)
if err != nil {
return nil, nil, err.Trace(urlStr)

View File

@@ -19,21 +19,37 @@ package cmd
import (
"context"
"io"
"os"
"path/filepath"
"regexp"
"strings"
"github.com/minio/cli"
"github.com/minio/mc/pkg/probe"
"golang.org/x/net/lex/httplex"
)
// parse and return encryption key pairs per alias.
func getEncKeys(ctx *cli.Context) (map[string][]prefixSSEPair, *probe.Error) {
sseKeys := os.Getenv("MC_ENCRYPT_KEY")
if key := ctx.String("encrypt-key"); key != "" {
sseKeys = key
}
encKeyDB, err := parseAndValidateEncryptionKeys(sseKeys)
if err != nil {
return nil, err.Trace(sseKeys)
}
return encKeyDB, nil
}
// Check if the passed URL represents a folder. It may or may not exist yet.
// If it exists, we can easily check if it is a folder, if it doesn't exist,
// we can guess if the url is a folder from how it looks.
func isAliasURLDir(aliasURL string) bool {
func isAliasURLDir(aliasURL string, keys map[string][]prefixSSEPair) bool {
// If the target url exists, check if it is a directory
// and return immediately.
_, targetContent, err := url2Stat(aliasURL)
_, targetContent, err := url2Stat(aliasURL, false, keys)
if err == nil {
return targetContent.Type.IsDir()
}

View File

@@ -105,9 +105,8 @@ EXAMPLES:
8. Copy a local folder with space separated characters to Amazon S3 cloud storage.
$ {{.HelpName}} --recursive 'workdir/documents/May 2014/' s3/miniocloud
10. Copy a folder with encrypted objects recursively from Amazon S3 to Minio cloud storage.
$ {{.HelpName}} --recursive --encrypt-key "s3/documents/a/b/c=32byteslongsecretkeymustbegiven1,myminio/documents/=32byteslongsecretkeymustbegiven2" 's3/documents/' myminio/documents/
9. Copy a folder with encrypted objects recursively from Amazon S3 to Minio cloud storage.
$ {{.HelpName}} --recursive --encrypt-key "s3/documents/=32byteslongsecretkeymustbegiven1,myminio/documents/=32byteslongsecretkeymustbegiven2" s3/documents/ myminio/documents/
`,
}
@@ -223,6 +222,7 @@ func doPrepareCopyURLs(session *sessionV8, trapCh <-chan bool, cancelCopy contex
encryptKeys := session.Header.CommandStringFlags["encrypt-key"]
encKeyDB, err := parseAndValidateEncryptionKeys(encryptKeys)
fatalIf(err, "Unable to parse encryption keys.")
// Create a session data file to store the processed URLs.
dataFP := session.NewDataWriter()
@@ -429,9 +429,12 @@ loop:
// mainCopy is the entry point for cp command.
func mainCopy(ctx *cli.Context) error {
// Parse encryption keys per command.
encKeyDB, err := getEncKeys(ctx)
fatalIf(err, "Unable to parse encryption keys.")
// check 'copy' cli arguments.
checkCopySyntax(ctx)
checkCopySyntax(ctx, encKeyDB)
// Additional command speific theme customization.
console.SetColor("Copy", color.New(color.FgGreen, color.Bold))

View File

@@ -23,7 +23,7 @@ import (
"github.com/minio/mc/pkg/console"
)
func checkCopySyntax(ctx *cli.Context) {
func checkCopySyntax(ctx *cli.Context, encKeyDB map[string][]prefixSSEPair) {
if len(ctx.Args()) < 2 {
cli.ShowCommandHelpAndExit(ctx, "cp", 1) // last argument is exit code.
}
@@ -38,10 +38,9 @@ func checkCopySyntax(ctx *cli.Context) {
tgtURL := URLs[len(URLs)-1]
isRecursive := ctx.Bool("recursive")
/****** Generic Invalid Rules *******/
// Verify if source(s) exists.
for _, srcURL := range srcURLs {
_, _, err := url2Stat(srcURL)
_, _, err := url2Stat(srcURL, false, encKeyDB)
if err != nil {
console.Fatalf("Unable to validate source %s\n", srcURL)
}
@@ -56,32 +55,33 @@ func checkCopySyntax(ctx *cli.Context) {
}
// Guess CopyURLsType based on source and target URLs.
copyURLsType, err := guessCopyURLType(srcURLs, tgtURL, isRecursive)
copyURLsType, err := guessCopyURLType(srcURLs, tgtURL, isRecursive, encKeyDB)
if err != nil {
fatalIf(errInvalidArgument().Trace(), "Unable to guess the type of copy operation.")
}
switch copyURLsType {
case copyURLsTypeA: // File -> File.
checkCopySyntaxTypeA(srcURLs, tgtURL)
checkCopySyntaxTypeA(srcURLs, tgtURL, encKeyDB)
case copyURLsTypeB: // File -> Folder.
checkCopySyntaxTypeB(srcURLs, tgtURL)
checkCopySyntaxTypeB(srcURLs, tgtURL, encKeyDB)
case copyURLsTypeC: // Folder... -> Folder.
checkCopySyntaxTypeC(srcURLs, tgtURL, isRecursive)
checkCopySyntaxTypeC(srcURLs, tgtURL, isRecursive, encKeyDB)
case copyURLsTypeD: // File1...FileN -> Folder.
checkCopySyntaxTypeD(srcURLs, tgtURL)
checkCopySyntaxTypeD(srcURLs, tgtURL, encKeyDB)
default:
fatalIf(errInvalidArgument().Trace(), "Unable to guess the type of copy operation.")
}
}
// checkCopySyntaxTypeA verifies if the source and target are valid file arguments.
func checkCopySyntaxTypeA(srcURLs []string, tgtURL string) {
func checkCopySyntaxTypeA(srcURLs []string, tgtURL string, keys map[string][]prefixSSEPair) {
// Check source.
if len(srcURLs) != 1 {
fatalIf(errInvalidArgument().Trace(), "Invalid number of source arguments.")
}
srcURL := srcURLs[0]
_, srcContent, err := url2Stat(srcURL)
_, srcContent, err := url2Stat(srcURL, false, keys)
fatalIf(err.Trace(srcURL), "Unable to stat source `"+srcURL+"`.")
if !srcContent.Type.IsRegular() {
@@ -90,13 +90,13 @@ func checkCopySyntaxTypeA(srcURLs []string, tgtURL string) {
}
// checkCopySyntaxTypeB verifies if the source is a valid file and target is a valid folder.
func checkCopySyntaxTypeB(srcURLs []string, tgtURL string) {
func checkCopySyntaxTypeB(srcURLs []string, tgtURL string, keys map[string][]prefixSSEPair) {
// Check source.
if len(srcURLs) != 1 {
fatalIf(errInvalidArgument().Trace(), "Invalid number of source arguments.")
}
srcURL := srcURLs[0]
_, srcContent, err := url2Stat(srcURL)
_, srcContent, err := url2Stat(srcURL, false, keys)
fatalIf(err.Trace(srcURL), "Unable to stat source `"+srcURL+"`.")
if !srcContent.Type.IsRegular() {
@@ -104,7 +104,7 @@ func checkCopySyntaxTypeB(srcURLs []string, tgtURL string) {
}
// Check target.
if _, tgtContent, err := url2Stat(tgtURL); err == nil {
if _, tgtContent, err := url2Stat(tgtURL, false, keys); err == nil {
if !tgtContent.Type.IsDir() {
fatalIf(errInvalidArgument().Trace(tgtURL), "Target `"+tgtURL+"` is not a folder.")
}
@@ -112,14 +112,14 @@ func checkCopySyntaxTypeB(srcURLs []string, tgtURL string) {
}
// checkCopySyntaxTypeC verifies if the source is a valid recursive dir and target is a valid folder.
func checkCopySyntaxTypeC(srcURLs []string, tgtURL string, isRecursive bool) {
func checkCopySyntaxTypeC(srcURLs []string, tgtURL string, isRecursive bool, keys map[string][]prefixSSEPair) {
// Check source.
if len(srcURLs) != 1 {
fatalIf(errInvalidArgument().Trace(), "Invalid number of source arguments.")
}
srcURL := srcURLs[0]
_, srcContent, err := url2Stat(srcURL)
_, srcContent, err := url2Stat(srcURL, false, keys)
// incomplete uploads are not necessary for copy operation, no need to verify for them.
isIncomplete := false
if err != nil && !isURLPrefixExists(srcURL, isIncomplete) {
@@ -131,7 +131,7 @@ func checkCopySyntaxTypeC(srcURLs []string, tgtURL string, isRecursive bool) {
}
// Check target.
if _, tgtContent, err := url2Stat(tgtURL); err == nil {
if _, tgtContent, err := url2Stat(tgtURL, false, keys); err == nil {
if !tgtContent.Type.IsDir() {
fatalIf(errInvalidArgument().Trace(tgtURL), "Target `"+tgtURL+"` is not a folder.")
}
@@ -139,10 +139,10 @@ func checkCopySyntaxTypeC(srcURLs []string, tgtURL string, isRecursive bool) {
}
// checkCopySyntaxTypeD verifies if the source is a valid list of files and target is a valid folder.
func checkCopySyntaxTypeD(srcURLs []string, tgtURL string) {
func checkCopySyntaxTypeD(srcURLs []string, tgtURL string, keys map[string][]prefixSSEPair) {
// Source can be anything: file, dir, dir...
// Check target if it is a dir
if _, tgtContent, err := url2Stat(tgtURL); err == nil {
if _, tgtContent, err := url2Stat(tgtURL, false, keys); err == nil {
if !tgtContent.Type.IsDir() {
fatalIf(errInvalidArgument().Trace(tgtURL), "Target `"+tgtURL+"` is not a folder.")
}

View File

@@ -50,10 +50,10 @@ const (
// guessCopyURLType guesses the type of clientURL. This approach all allows prepareURL
// functions to accurately report failure causes.
func guessCopyURLType(sourceURLs []string, targetURL string, isRecursive bool) (copyURLsType, *probe.Error) {
func guessCopyURLType(sourceURLs []string, targetURL string, isRecursive bool, keys map[string][]prefixSSEPair) (copyURLsType, *probe.Error) {
if len(sourceURLs) == 1 { // 1 Source, 1 Target
sourceURL := sourceURLs[0]
_, sourceContent, err := url2Stat(sourceURL)
_, sourceContent, err := url2Stat(sourceURL, false, keys)
if err != nil {
return copyURLsTypeInvalid, err
}
@@ -65,7 +65,7 @@ func guessCopyURLType(sourceURLs []string, targetURL string, isRecursive bool) (
}
// If target is a folder, it is Type B.
if isAliasURLDir(targetURL) {
if isAliasURLDir(targetURL, keys) {
return copyURLsTypeB, nil
}
// else Type A.
@@ -73,7 +73,7 @@ func guessCopyURLType(sourceURLs []string, targetURL string, isRecursive bool) (
}
// Multiple source args and target is a folder. It is Type D.
if isAliasURLDir(targetURL) {
if isAliasURLDir(targetURL, keys) {
return copyURLsTypeD, nil
}
@@ -88,7 +88,7 @@ func prepareCopyURLsTypeA(sourceURL string, targetURL string, encKeyDB map[strin
// Find alias and expanded clientURL.
targetAlias, targetURL, _ := mustExpandAlias(targetURL)
_, sourceContent, err := url2Stat(sourceURL)
_, sourceContent, err := url2Stat(sourceURL, false, encKeyDB)
if err != nil {
// Source does not exist or insufficient privileges.
return URLs{Error: err.Trace(sourceURL)}
@@ -134,7 +134,7 @@ func prepareCopyURLsTypeB(sourceURL string, targetURL string, encKeyDB map[strin
// Find alias and expanded clientURL.
targetAlias, targetURL, _ := mustExpandAlias(targetURL)
_, sourceContent, err := url2Stat(sourceURL)
_, sourceContent, err := url2Stat(sourceURL, false, encKeyDB)
if err != nil {
// Source does not exist or insufficient privileges.
return URLs{Error: err.Trace(sourceURL)}
@@ -230,7 +230,7 @@ func prepareCopyURLs(sourceURLs []string, targetURL string, isRecursive bool, en
copyURLsCh := make(chan URLs)
go func(sourceURLs []string, targetURL string, copyURLsCh chan URLs, encKeyDB map[string][]prefixSSEPair) {
defer close(copyURLsCh)
cpType, err := guessCopyURLType(sourceURLs, targetURL, isRecursive)
cpType, err := guessCopyURLType(sourceURLs, targetURL, isRecursive, encKeyDB)
fatalIf(err.Trace(), "Unable to guess the type of copy operation.")
switch cpType {

View File

@@ -108,7 +108,7 @@ func (d diffMessage) JSON() string {
return string(diffJSONBytes)
}
func checkDiffSyntax(ctx *cli.Context) {
func checkDiffSyntax(ctx *cli.Context, encKeyDB map[string][]prefixSSEPair) {
if len(ctx.Args()) != 2 {
cli.ShowCommandHelpAndExit(ctx, "diff", 1) // last argument is exit code
}
@@ -124,7 +124,7 @@ func checkDiffSyntax(ctx *cli.Context) {
// Diff only works between two directories, verify them below.
// Verify if firstURL is accessible.
_, firstContent, err := url2Stat(firstURL)
_, firstContent, err := url2Stat(firstURL, false, encKeyDB)
if err != nil {
fatalIf(err.Trace(firstURL), fmt.Sprintf("Unable to stat '%s'.", firstURL))
}
@@ -135,7 +135,7 @@ func checkDiffSyntax(ctx *cli.Context) {
}
// Verify if secondURL is accessible.
_, secondContent, err := url2Stat(secondURL)
_, secondContent, err := url2Stat(secondURL, false, encKeyDB)
if err != nil {
fatalIf(err.Trace(secondURL), fmt.Sprintf("Unable to stat '%s'.", secondURL))
}
@@ -189,9 +189,12 @@ func doDiffMain(firstURL, secondURL string) error {
// mainDiff main for 'diff'.
func mainDiff(ctx *cli.Context) error {
// Parse encryption keys per command.
encKeyDB, err := getEncKeys(ctx)
fatalIf(err, "Unable to parse encryption keys.")
// check 'diff' cli arguments.
checkDiffSyntax(ctx)
checkDiffSyntax(ctx, encKeyDB)
// Additional command specific theme customization.
console.SetColor("DiffMessage", color.New(color.FgGreen, color.Bold))

View File

@@ -158,7 +158,7 @@ EXAMPLES:
}
// checkFindSyntax - validate the passed arguments
func checkFindSyntax(ctx *cli.Context) {
func checkFindSyntax(ctx *cli.Context, encKeyDB map[string][]prefixSSEPair) {
args := ctx.Args()
if !args.Present() {
args = []string{"./"} // No args just default to present directory.
@@ -174,7 +174,7 @@ func checkFindSyntax(ctx *cli.Context) {
// Extract input URLs and validate.
for _, url := range args {
_, _, err := url2Stat(url)
_, _, err := url2Stat(url, false, encKeyDB)
if err != nil && !isURLPrefixExists(url, false) {
// Bucket name empty is a valid error for 'find myminio' unless we are using watch, treat it as such.
if _, ok := err.ToGoError().(BucketNameEmpty); ok && !ctx.Bool("watch") {
@@ -216,7 +216,11 @@ func mainFind(ctx *cli.Context) error {
console.SetColor("Find", color.New(color.FgGreen, color.Bold))
console.SetColor("FindExecErr", color.New(color.FgRed, color.Italic, color.Bold))
checkFindSyntax(ctx)
// Parse encryption keys per command.
encKeyDB, err := getEncKeys(ctx)
fatalIf(err, "Unable to parse encryption keys.")
checkFindSyntax(ctx, encKeyDB)
args := ctx.Args()
if !args.Present() {

View File

@@ -35,6 +35,10 @@ var (
Name: "incomplete, I",
Usage: "List incomplete uploads.",
},
cli.StringFlag{
Name: "encrypt-key",
Usage: "Encrypt/Decrypt (using server-side encryption)",
},
}
)
@@ -91,8 +95,12 @@ func checkListSyntax(ctx *cli.Context) {
URLs := ctx.Args()
isIncomplete := ctx.Bool("incomplete")
// Parse encryption keys per command.
encKeyDB, err := getEncKeys(ctx)
fatalIf(err, "Unable to parse encryption keys.")
for _, url := range URLs {
_, _, err := url2Stat(url)
_, _, err := url2Stat(url, false, encKeyDB)
if err != nil && !isURLPrefixExists(url, isIncomplete) {
// Bucket name empty is a valid error for 'ls myminio',
// treat it as such.

View File

@@ -141,11 +141,11 @@ EXAMPLES:
9. Mirror objects newer than 10 days from bucket test to a local folder.
$ {{.HelpName}} --newer-than=10 s3/test ~/localfolder
10. Mirror a bucket older than 30 days from Amazon S3 bucket test to a local folder.
10. Mirror objects older than 30 days from Amazon S3 bucket test to a local folder.
$ {{.HelpName}} --older-than=30 s3/test ~/test
11. Mirror server encrypted objects from Minio cloud storage to a bucket on Amazon S3 cloud storage
$ {{.HelpName}} --encrypt-key "minio/photos=32byteslongsecretkeymustbegiven1,s3/archive=32byteslongsecretkeymustbegiven2" minio/photos/ s3/archive
$ {{.HelpName}} --encrypt-key "minio/photos=32byteslongsecretkeymustbegiven1,s3/archive=32byteslongsecretkeymustbegiven2" minio/photos/ s3/archive/
`,
}
@@ -752,9 +752,12 @@ func runMirror(srcURL, dstURL string, ctx *cli.Context, encKeyDB map[string][]pr
// Main entry point for mirror command.
func mainMirror(ctx *cli.Context) error {
// Parse encryption keys per command.
encKeyDB, err := getEncKeys(ctx)
fatalIf(err, "Unable to parse encryption keys.")
// check 'mirror' cli arguments.
checkMirrorSyntax(ctx)
checkMirrorSyntax(ctx, encKeyDB)
// Additional command specific theme customization.
console.SetColor("Mirror", color.New(color.FgGreen, color.Bold))
@@ -763,13 +766,6 @@ func mainMirror(ctx *cli.Context) error {
srcURL := args[0]
tgtURL := args[1]
sseKeys := os.Getenv("MC_ENCRYPT_KEY")
if key := ctx.String("encrypt-key"); key != "" {
sseKeys = key
}
encKeyDB, err := parseAndValidateEncryptionKeys(sseKeys)
fatalIf(err, "Unable to parse encryption keys.")
if err := runMirror(srcURL, tgtURL, ctx, encKeyDB); err != nil {
errorIf(err.Trace(srcURL, tgtURL), "Unable to mirror.")

View File

@@ -31,7 +31,7 @@ import (
// mirror(d1..., d2) -> []mirror(d1/f, d2/d1/f)
// checkMirrorSyntax(URLs []string)
func checkMirrorSyntax(ctx *cli.Context) {
func checkMirrorSyntax(ctx *cli.Context, encKeyDB map[string][]prefixSSEPair) {
if len(ctx.Args()) != 2 {
cli.ShowCommandHelpAndExit(ctx, "mirror", 1) // last argument is exit code.
}
@@ -49,7 +49,7 @@ func checkMirrorSyntax(ctx *cli.Context) {
/****** Generic rules *******/
if !ctx.Bool("watch") {
_, srcContent, err := url2Stat(srcURL)
_, srcContent, err := url2Stat(srcURL, false, encKeyDB)
// incomplete uploads are not necessary for copy operation, no need to verify for them.
isIncomplete := false
if err != nil && !isURLPrefixExists(srcURL, isIncomplete) {

View File

@@ -104,23 +104,19 @@ func checkPipeSyntax(ctx *cli.Context) {
// mainPipe is the main entry point for pipe command.
func mainPipe(ctx *cli.Context) error {
// Parse encryption keys per command.
encKeyDB, err := getEncKeys(ctx)
fatalIf(err, "Unable to parse encryption keys.")
// validate pipe input arguments.
checkPipeSyntax(ctx)
if len(ctx.Args()) == 0 {
err := pipe("", nil)
err = pipe("", nil)
fatalIf(err.Trace("stdout"), "Unable to write to one or more targets.")
} else {
// extract URLs.
URLs := ctx.Args()
sseKeys := os.Getenv("MC_ENCRYPT_KEY")
if key := ctx.String("encrypt-key"); key != "" {
sseKeys = key
}
encKeyDB, err := parseAndValidateEncryptionKeys(sseKeys)
fatalIf(err, "Unable to parse encryption keys.")
err = pipe(URLs[0], encKeyDB)
fatalIf(err.Trace(URLs[0]), "Unable to write to one or more targets.")
}

View File

@@ -146,19 +146,20 @@ func (r rmMessage) JSON() string {
}
// Validate command line arguments.
func checkRmSyntax(ctx *cli.Context) {
func checkRmSyntax(ctx *cli.Context, encKeyDB map[string][]prefixSSEPair) {
// Set command flags from context.
isForce := ctx.Bool("force")
isRecursive := ctx.Bool("recursive")
isStdin := ctx.Bool("stdin")
isDangerous := ctx.Bool("dangerous")
isNamespaceRemoval := false
for _, url := range ctx.Args() {
// clean path for aliases like s3/.
//Note: UNC path using / works properly in go 1.9.2 even though it breaks the UNC specification.
url = filepath.ToSlash(filepath.Clean(url))
// namespace removal applies only for non FS. So filter out if passed url represents a directory
if !isAliasURLDir(url) {
if !isAliasURLDir(url, encKeyDB) {
_, path := url2Alias(url)
isNamespaceRemoval = (path == "")
break
@@ -320,9 +321,12 @@ func removeRecursive(url string, isIncomplete bool, isFake bool, olderThan int,
// main for rm command.
func mainRm(ctx *cli.Context) error {
// Parse encryption keys per command.
encKeyDB, err := getEncKeys(ctx)
fatalIf(err, "Unable to parse encryption keys.")
// check 'rm' cli arguments.
checkRmSyntax(ctx)
checkRmSyntax(ctx, encKeyDB)
// rm specific flags.
isIncomplete := ctx.Bool("incomplete")
@@ -331,28 +335,22 @@ func mainRm(ctx *cli.Context) error {
isStdin := ctx.Bool("stdin")
olderThan := ctx.Int("older-than")
newerThan := ctx.Int("newer-than")
sseKeys := os.Getenv("MC_ENCRYPT_KEY")
if key := ctx.String("encrypt-key"); key != "" {
sseKeys = key
}
encKeyDB, perr := parseAndValidateEncryptionKeys(sseKeys)
fatalIf(perr, "Unable to parse encryption keys.")
// Set color.
console.SetColor("Remove", color.New(color.FgGreen, color.Bold))
var rerr error
var err error
var e error
// Support multiple targets.
for _, url := range ctx.Args() {
if isRecursive {
err = removeRecursive(url, isIncomplete, isFake, olderThan, newerThan, encKeyDB)
e = removeRecursive(url, isIncomplete, isFake, olderThan, newerThan, encKeyDB)
} else {
err = removeSingle(url, isIncomplete, isFake, olderThan, newerThan, encKeyDB)
e = removeSingle(url, isIncomplete, isFake, olderThan, newerThan, encKeyDB)
}
if rerr == nil {
rerr = err
rerr = e
}
}
@@ -364,13 +362,13 @@ func mainRm(ctx *cli.Context) error {
for scanner.Scan() {
url := scanner.Text()
if isRecursive {
err = removeRecursive(url, isIncomplete, isFake, olderThan, newerThan, encKeyDB)
e = removeRecursive(url, isIncomplete, isFake, olderThan, newerThan, encKeyDB)
} else {
err = removeSingle(url, isIncomplete, isFake, olderThan, newerThan, encKeyDB)
e = removeSingle(url, isIncomplete, isFake, olderThan, newerThan, encKeyDB)
}
if rerr == nil {
rerr = err
rerr = e
}
}

View File

@@ -67,7 +67,7 @@ EXAMPLES:
}
// checkShareDownloadSyntax - validate command-line args.
func checkShareDownloadSyntax(ctx *cli.Context) {
func checkShareDownloadSyntax(ctx *cli.Context, encKeyDB map[string][]prefixSSEPair) {
args := ctx.Args()
if !args.Present() {
cli.ShowCommandHelpAndExit(ctx, "download", 1) // last argument is exit code.
@@ -94,7 +94,7 @@ func checkShareDownloadSyntax(ctx *cli.Context) {
isRecursive := ctx.Bool("recursive")
if !isRecursive {
for _, url := range ctx.Args() {
_, _, err := url2Stat(url)
_, _, err := url2Stat(url, false, encKeyDB)
if err != nil {
fatalIf(err.Trace(url), "Unable to stat `"+url+"`.")
}
@@ -193,9 +193,12 @@ func doShareDownloadURL(targetURL string, isRecursive bool, expiry time.Duration
// main for share download.
func mainShareDownload(ctx *cli.Context) error {
// Parse encryption keys per command.
encKeyDB, err := getEncKeys(ctx)
fatalIf(err, "Unable to parse encryption keys.")
// check input arguments.
checkShareDownloadSyntax(ctx)
checkShareDownloadSyntax(ctx, encKeyDB)
// Initialize share config folder.
initShareConfig()

View File

@@ -17,7 +17,6 @@
package cmd
import (
"os"
"strings"
"github.com/fatih/color"
@@ -75,7 +74,7 @@ EXAMPLES:
}
// checkStatSyntax - validate all the passed arguments
func checkStatSyntax(ctx *cli.Context) {
func checkStatSyntax(ctx *cli.Context, encKeyDB map[string][]prefixSSEPair) {
if !ctx.Args().Present() {
cli.ShowCommandHelpAndExit(ctx, "stat", 1) // last argument is exit code
}
@@ -89,8 +88,9 @@ func checkStatSyntax(ctx *cli.Context) {
// extract URLs.
URLs := ctx.Args()
isIncomplete := false
for _, url := range URLs {
_, _, err := url2Stat(url)
_, _, err := url2Stat(url, false, encKeyDB)
if err != nil && !isURLPrefixExists(url, isIncomplete) {
fatalIf(err.Trace(url), "Unable to stat `"+url+"`.")
}
@@ -108,8 +108,12 @@ func mainStat(ctx *cli.Context) error {
console.SetColor("EncryptionHeaders", color.New(color.FgWhite))
console.SetColor("Metadata", color.New(color.FgWhite))
// Parse encryption keys per command.
encKeyDB, err := getEncKeys(ctx)
fatalIf(err, "Unable to parse encryption keys.")
// check 'stat' cli arguments.
checkStatSyntax(ctx)
checkStatSyntax(ctx, encKeyDB)
// Set command flags from context.
isRecursive := ctx.Bool("recursive")
@@ -120,14 +124,6 @@ func mainStat(ctx *cli.Context) error {
args = []string{"."}
}
sseKeys := os.Getenv("MC_ENCRYPT_KEY")
if key := ctx.String("encrypt-key"); key != "" {
sseKeys = key
}
encKeyDB, err := parseAndValidateEncryptionKeys(sseKeys)
fatalIf(err, "Unable to parse encryption keys.")
var cErr error
for _, targetURL := range args {
var clnt Client

View File

@@ -144,7 +144,7 @@ func doStat(clnt Client, isRecursive bool, targetAlias, targetURL string, encKey
continue
}
url := targetAlias + getKey(content)
_, stat, err := url2StatWithMetadata(url, true, encKeyDB)
_, stat, err := url2Stat(url, true, encKeyDB)
if err != nil {
stat = content
}

View File

@@ -455,7 +455,7 @@ func (c Client) ComposeObjectWithProgress(dst DestinationInfo, srcs []SourceInfo
for i, src := range srcs {
h := src.Headers
if src.encryption != nil {
src.encryption.Marshal(h)
encrypt.SSECopy(src.encryption).Marshal(h)
}
// Add destination encryption headers
if dst.encryption != nil {
@@ -480,7 +480,7 @@ func (c Client) ComposeObjectWithProgress(dst DestinationInfo, srcs []SourceInfo
return err
}
if progress != nil {
io.CopyN(ioutil.Discard, progress, start+end-1)
io.CopyN(ioutil.Discard, progress, end-start+1)
}
objParts = append(objParts, complPart)
partIndex++
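
The progress-accounting fix above follows inclusive byte-range arithmetic: a copied part covering offsets start through end contains end-start+1 bytes, not start+end-1. A quick check with hypothetical numbers (a 5 MiB part starting at offset 0):

package main

import "fmt"

func main() {
	// Hypothetical part copied as the inclusive byte range [start, end].
	start, end := int64(0), int64(5*1024*1024-1)
	fmt.Println(end - start + 1) // 5242880 — bytes actually copied
	fmt.Println(start + end - 1) // 5242878 — the old expression under-counts
}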

View File

@@ -99,7 +99,7 @@ type Options struct {
// Global constants.
const (
libraryName = "minio-go"
libraryVersion = "v6.0.5"
libraryVersion = "v6.0.6"
)
// User Agent should always following the below style.

vendor/vendor.json vendored (30 changed lines)
View File

@@ -56,46 +56,46 @@
"revisionTime": "2015-10-24T22:24:27-07:00"
},
{
"checksumSHA1": "qmCEhMpDtl8rzdoxAlK9pz/rkek=",
"checksumSHA1": "PiGm5X1m6ZVCHxE6tua2tlCYg5I=",
"path": "github.com/minio/minio-go",
"revision": "70799fe8dae6ecfb6c7d7e9e048fce27f23a1992",
"revisionTime": "2018-07-05T14:57:19Z"
"revision": "f44ca5490afab26b1b2988eb870b20c5183817b0",
"revisionTime": "2018-07-11T12:25:12Z"
},
{
"checksumSHA1": "Qsj+6JPmJ8R5rFNQSHqRb8xAwOw=",
"path": "github.com/minio/minio-go/pkg/credentials",
"revision": "70799fe8dae6ecfb6c7d7e9e048fce27f23a1992",
"revisionTime": "2018-07-05T14:57:19Z"
"revision": "f44ca5490afab26b1b2988eb870b20c5183817b0",
"revisionTime": "2018-07-11T12:25:12Z"
},
{
"checksumSHA1": "Md5pOKYfoKtrG7xNvs2FtiDPfDc=",
"path": "github.com/minio/minio-go/pkg/encrypt",
"revision": "70799fe8dae6ecfb6c7d7e9e048fce27f23a1992",
"revisionTime": "2018-07-05T14:57:19Z"
"revision": "f44ca5490afab26b1b2988eb870b20c5183817b0",
"revisionTime": "2018-07-11T12:25:12Z"
},
{
"checksumSHA1": "6D/qMFV+e39L+6aeT+Seq1guohM=",
"path": "github.com/minio/minio-go/pkg/policy",
"revision": "70799fe8dae6ecfb6c7d7e9e048fce27f23a1992",
"revisionTime": "2018-07-05T14:57:19Z"
"revision": "f44ca5490afab26b1b2988eb870b20c5183817b0",
"revisionTime": "2018-07-11T12:25:12Z"
},
{
"checksumSHA1": "bbWjcrOQsV57qK+BSsrNAsI+Q/o=",
"path": "github.com/minio/minio-go/pkg/s3signer",
"revision": "70799fe8dae6ecfb6c7d7e9e048fce27f23a1992",
"revisionTime": "2018-07-05T14:57:19Z"
"revision": "f44ca5490afab26b1b2988eb870b20c5183817b0",
"revisionTime": "2018-07-11T12:25:12Z"
},
{
"checksumSHA1": "xrJThFwwkVrJdwd5iYFHqfx4wRY=",
"path": "github.com/minio/minio-go/pkg/s3utils",
"revision": "70799fe8dae6ecfb6c7d7e9e048fce27f23a1992",
"revisionTime": "2018-07-05T14:57:19Z"
"revision": "f44ca5490afab26b1b2988eb870b20c5183817b0",
"revisionTime": "2018-07-11T12:25:12Z"
},
{
"checksumSHA1": "Wt8ej+rZXTdNBR9Xyw1eGo3Iq5o=",
"path": "github.com/minio/minio-go/pkg/set",
"revision": "70799fe8dae6ecfb6c7d7e9e048fce27f23a1992",
"revisionTime": "2018-07-05T14:57:19Z"
"revision": "f44ca5490afab26b1b2988eb870b20c5183817b0",
"revisionTime": "2018-07-11T12:25:12Z"
},
{
"checksumSHA1": "MEC+K9aTG+8tfPjnJ4qj2Y+kc4s=",