Pass down encrypt-key handling across command validators (#2483)
Currently, even when a command supports encrypted objects, syntax verification can fail on them in certain cases, because validation of the sources/targets does not use the parsed encrypt-key pairs. This PR fixes that behavior by parsing the encryption keys once per command and passing them down to the validators.
Committed by: kannappanr
Parent: c57a2dc4c2
Commit: 70dcf20d74
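Every affected command follows the same pattern after this change. The sketch below is illustrative only (mainFoo, checkFooSyntax and the argument handling are hypothetical stand-ins, not code from this commit): the command parses the encryption keys once through the new getEncKeys helper and then threads the resulting encKeyDB through its syntax validator and every url2Stat call, so validating an SSE-C encrypted source or target no longer fails.

// Hypothetical command entry point illustrating the pattern applied to
// cat, cp, diff, find, ls, mirror, pipe, rm, share download and stat.
func mainFoo(ctx *cli.Context) error {
    // Parse encryption keys once per command (flag first, then MC_ENCRYPT_KEY).
    encKeyDB, err := getEncKeys(ctx)
    fatalIf(err, "Unable to parse encryption keys.")

    // Syntax validation now receives the parsed key pairs.
    checkFooSyntax(ctx, encKeyDB)

    // Any later stat on a source/target passes the keys down as well.
    for _, url := range ctx.Args() {
        _, _, err := url2Stat(url, false, encKeyDB)
        if err != nil {
            fatalIf(err.Trace(url), "Unable to stat `"+url+"`.")
        }
    }
    return nil
}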
@@ -152,7 +152,7 @@ func catURL(sourceURL string, encKeyDB map[string][]prefixSSEPair) *probe.Error
     // downloaded object is equal to the original one. FS files
     // are ignored since some of them have zero size though they
     // have contents like files under /proc.
-    client, content, err := url2Stat(sourceURL)
+    client, content, err := url2Stat(sourceURL, false, encKeyDB)
     if err == nil && client.GetURL().Type == objectStorage {
         size = content.Size
     }
@@ -210,6 +210,9 @@ func catOut(r io.Reader, size int64) *probe.Error {

 // mainCat is the main entry point for cat command.
 func mainCat(ctx *cli.Context) error {
+    // Parse encryption keys per command.
+    encKeyDB, err := getEncKeys(ctx)
+    fatalIf(err, "Unable to parse encryption keys.")

     // check 'cat' cli arguments.
     checkCatSyntax(ctx)
@@ -237,16 +240,11 @@ func mainCat(ctx *cli.Context) error {
             }
         }
     }
-    sseKeys := os.Getenv("MC_ENCRYPT_KEY")
-    if key := ctx.String("encrypt-key"); key != "" {
-        sseKeys = key
-    }

-    encKeyDB, err := parseAndValidateEncryptionKeys(sseKeys)
-    fatalIf(err, "Unable to parse encryption keys.")
     // Convert arguments to URLs: expand alias, fix format.
     for _, url := range args {
         fatalIf(catURL(url, encKeyDB).Trace(url), "Unable to read from `"+url+"`.")
     }

     return nil
 }
@@ -171,12 +171,7 @@ func urlJoinPath(url1, url2 string) string {
 }

 // url2Stat returns stat info for URL.
-func url2Stat(urlStr string) (client Client, content *clientContent, err *probe.Error) {
-    return url2StatWithMetadata(urlStr, false, nil)
-}
-
-// url2Stat returns stat info for URL.
-func url2StatWithMetadata(urlStr string, isFetchMeta bool, encKeyDB map[string][]prefixSSEPair) (client Client, content *clientContent, err *probe.Error) {
+func url2Stat(urlStr string, isFetchMeta bool, encKeyDB map[string][]prefixSSEPair) (client Client, content *clientContent, err *probe.Error) {
     client, err = newClient(urlStr)
     if err != nil {
         return nil, nil, err.Trace(urlStr)
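The hunk above folds url2StatWithMetadata into url2Stat itself, so every caller now states explicitly whether object metadata should be fetched and which key pairs apply. A minimal sketch of a migrated call site (statBoth is a hypothetical helper, not part of this commit):

// statBoth shows both call shapes against the merged signature.
func statBoth(urlStr string, encKeyDB map[string][]prefixSSEPair) (*clientContent, *clientContent, *probe.Error) {
    // Was: url2Stat(urlStr)
    _, plain, err := url2Stat(urlStr, false, encKeyDB)
    if err != nil {
        return nil, nil, err.Trace(urlStr)
    }
    // Was: url2StatWithMetadata(urlStr, true, encKeyDB)
    _, withMeta, err := url2Stat(urlStr, true, encKeyDB)
    if err != nil {
        return plain, nil, err.Trace(urlStr)
    }
    return plain, withMeta, nil
}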
@@ -19,21 +19,37 @@ package cmd
 import (
     "context"
     "io"
+    "os"
     "path/filepath"
     "regexp"
     "strings"

+    "github.com/minio/cli"
     "github.com/minio/mc/pkg/probe"
     "golang.org/x/net/lex/httplex"
 )

+// parse and return encryption key pairs per alias.
+func getEncKeys(ctx *cli.Context) (map[string][]prefixSSEPair, *probe.Error) {
+    sseKeys := os.Getenv("MC_ENCRYPT_KEY")
+    if key := ctx.String("encrypt-key"); key != "" {
+        sseKeys = key
+    }
+
+    encKeyDB, err := parseAndValidateEncryptionKeys(sseKeys)
+    if err != nil {
+        return nil, err.Trace(sseKeys)
+    }
+    return encKeyDB, nil
+}
+
 // Check if the passed URL represents a folder. It may or may not exist yet.
 // If it exists, we can easily check if it is a folder, if it doesn't exist,
 // we can guess if the url is a folder from how it looks.
-func isAliasURLDir(aliasURL string) bool {
+func isAliasURLDir(aliasURL string, keys map[string][]prefixSSEPair) bool {
     // If the target url exists, check if it is a directory
     // and return immediately.
-    _, targetContent, err := url2Stat(aliasURL)
+    _, targetContent, err := url2Stat(aliasURL, false, keys)
     if err == nil {
         return targetContent.Type.IsDir()
     }
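getEncKeys hands parseAndValidateEncryptionKeys a comma-separated list of alias/prefix=key pairs, the same format used by the --encrypt-key examples in the cp and mirror help text below. The helper in this sketch is purely illustrative (it is not part of mc) and assumes the real validation of 32-byte keys and known aliases still happens inside parseAndValidateEncryptionKeys:

// buildEncryptKeyValue joins prefix=key pairs into the value accepted by
// --encrypt-key or the MC_ENCRYPT_KEY environment variable, for example:
//   s3/documents/=32byteslongsecretkeymustbegiven1,myminio/documents/=32byteslongsecretkeymustbegiven2
func buildEncryptKeyValue(pairs map[string]string) string {
    parts := make([]string, 0, len(pairs))
    for prefix, key := range pairs {
        parts = append(parts, prefix+"="+key)
    }
    return strings.Join(parts, ",")
}

Per getEncKeys above, a non-empty --encrypt-key flag takes precedence over MC_ENCRYPT_KEY.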
@@ -105,9 +105,8 @@ EXAMPLES:
    8. Copy a local folder with space separated characters to Amazon S3 cloud storage.
       $ {{.HelpName}} --recursive 'workdir/documents/May 2014/' s3/miniocloud

-  10. Copy a folder with encrypted objects recursively from Amazon S3 to Minio cloud storage.
-      $ {{.HelpName}} --recursive --encrypt-key "s3/documents/a/b/c=32byteslongsecretkeymustbegiven1,myminio/documents/=32byteslongsecretkeymustbegiven2" 's3/documents/' myminio/documents/
+   9. Copy a folder with encrypted objects recursively from Amazon S3 to Minio cloud storage.
+      $ {{.HelpName}} --recursive --encrypt-key "s3/documents/=32byteslongsecretkeymustbegiven1,myminio/documents/=32byteslongsecretkeymustbegiven2" s3/documents/ myminio/documents/

 `,
 }
@@ -223,6 +222,7 @@ func doPrepareCopyURLs(session *sessionV8, trapCh <-chan bool, cancelCopy contex
     encryptKeys := session.Header.CommandStringFlags["encrypt-key"]
     encKeyDB, err := parseAndValidateEncryptionKeys(encryptKeys)
     fatalIf(err, "Unable to parse encryption keys.")

     // Create a session data file to store the processed URLs.
     dataFP := session.NewDataWriter()
@@ -429,9 +429,12 @@ loop:

 // mainCopy is the entry point for cp command.
 func mainCopy(ctx *cli.Context) error {
+    // Parse encryption keys per command.
+    encKeyDB, err := getEncKeys(ctx)
+    fatalIf(err, "Unable to parse encryption keys.")

     // check 'copy' cli arguments.
-    checkCopySyntax(ctx)
+    checkCopySyntax(ctx, encKeyDB)

     // Additional command speific theme customization.
     console.SetColor("Copy", color.New(color.FgGreen, color.Bold))
@@ -23,7 +23,7 @@ import (
     "github.com/minio/mc/pkg/console"
 )

-func checkCopySyntax(ctx *cli.Context) {
+func checkCopySyntax(ctx *cli.Context, encKeyDB map[string][]prefixSSEPair) {
     if len(ctx.Args()) < 2 {
         cli.ShowCommandHelpAndExit(ctx, "cp", 1) // last argument is exit code.
     }
@@ -38,10 +38,9 @@ func checkCopySyntax(ctx *cli.Context) {
     tgtURL := URLs[len(URLs)-1]
     isRecursive := ctx.Bool("recursive")

-    /****** Generic Invalid Rules *******/
     // Verify if source(s) exists.
     for _, srcURL := range srcURLs {
-        _, _, err := url2Stat(srcURL)
+        _, _, err := url2Stat(srcURL, false, encKeyDB)
         if err != nil {
             console.Fatalf("Unable to validate source %s\n", srcURL)
         }
@@ -56,32 +55,33 @@ func checkCopySyntax(ctx *cli.Context) {
     }

     // Guess CopyURLsType based on source and target URLs.
-    copyURLsType, err := guessCopyURLType(srcURLs, tgtURL, isRecursive)
+    copyURLsType, err := guessCopyURLType(srcURLs, tgtURL, isRecursive, encKeyDB)
     if err != nil {
         fatalIf(errInvalidArgument().Trace(), "Unable to guess the type of copy operation.")
     }

     switch copyURLsType {
     case copyURLsTypeA: // File -> File.
-        checkCopySyntaxTypeA(srcURLs, tgtURL)
+        checkCopySyntaxTypeA(srcURLs, tgtURL, encKeyDB)
     case copyURLsTypeB: // File -> Folder.
-        checkCopySyntaxTypeB(srcURLs, tgtURL)
+        checkCopySyntaxTypeB(srcURLs, tgtURL, encKeyDB)
     case copyURLsTypeC: // Folder... -> Folder.
-        checkCopySyntaxTypeC(srcURLs, tgtURL, isRecursive)
+        checkCopySyntaxTypeC(srcURLs, tgtURL, isRecursive, encKeyDB)
     case copyURLsTypeD: // File1...FileN -> Folder.
-        checkCopySyntaxTypeD(srcURLs, tgtURL)
+        checkCopySyntaxTypeD(srcURLs, tgtURL, encKeyDB)
     default:
         fatalIf(errInvalidArgument().Trace(), "Unable to guess the type of copy operation.")
     }
 }

 // checkCopySyntaxTypeA verifies if the source and target are valid file arguments.
-func checkCopySyntaxTypeA(srcURLs []string, tgtURL string) {
+func checkCopySyntaxTypeA(srcURLs []string, tgtURL string, keys map[string][]prefixSSEPair) {
     // Check source.
     if len(srcURLs) != 1 {
         fatalIf(errInvalidArgument().Trace(), "Invalid number of source arguments.")
     }
     srcURL := srcURLs[0]
-    _, srcContent, err := url2Stat(srcURL)
+    _, srcContent, err := url2Stat(srcURL, false, keys)
     fatalIf(err.Trace(srcURL), "Unable to stat source `"+srcURL+"`.")

     if !srcContent.Type.IsRegular() {
@@ -90,13 +90,13 @@ func checkCopySyntaxTypeA(srcURLs []string, tgtURL string) {
 }

 // checkCopySyntaxTypeB verifies if the source is a valid file and target is a valid folder.
-func checkCopySyntaxTypeB(srcURLs []string, tgtURL string) {
+func checkCopySyntaxTypeB(srcURLs []string, tgtURL string, keys map[string][]prefixSSEPair) {
     // Check source.
     if len(srcURLs) != 1 {
         fatalIf(errInvalidArgument().Trace(), "Invalid number of source arguments.")
     }
     srcURL := srcURLs[0]
-    _, srcContent, err := url2Stat(srcURL)
+    _, srcContent, err := url2Stat(srcURL, false, keys)
     fatalIf(err.Trace(srcURL), "Unable to stat source `"+srcURL+"`.")

     if !srcContent.Type.IsRegular() {
@@ -104,7 +104,7 @@ func checkCopySyntaxTypeB(srcURLs []string, tgtURL string) {
     }

     // Check target.
-    if _, tgtContent, err := url2Stat(tgtURL); err == nil {
+    if _, tgtContent, err := url2Stat(tgtURL, false, keys); err == nil {
         if !tgtContent.Type.IsDir() {
             fatalIf(errInvalidArgument().Trace(tgtURL), "Target `"+tgtURL+"` is not a folder.")
         }
@@ -112,14 +112,14 @@ func checkCopySyntaxTypeB(srcURLs []string, tgtURL string) {
 }

 // checkCopySyntaxTypeC verifies if the source is a valid recursive dir and target is a valid folder.
-func checkCopySyntaxTypeC(srcURLs []string, tgtURL string, isRecursive bool) {
+func checkCopySyntaxTypeC(srcURLs []string, tgtURL string, isRecursive bool, keys map[string][]prefixSSEPair) {
     // Check source.
     if len(srcURLs) != 1 {
         fatalIf(errInvalidArgument().Trace(), "Invalid number of source arguments.")
     }

     srcURL := srcURLs[0]
-    _, srcContent, err := url2Stat(srcURL)
+    _, srcContent, err := url2Stat(srcURL, false, keys)
     // incomplete uploads are not necessary for copy operation, no need to verify for them.
     isIncomplete := false
     if err != nil && !isURLPrefixExists(srcURL, isIncomplete) {
@@ -131,7 +131,7 @@ func checkCopySyntaxTypeC(srcURLs []string, tgtURL string, isRecursive bool) {
     }

     // Check target.
-    if _, tgtContent, err := url2Stat(tgtURL); err == nil {
+    if _, tgtContent, err := url2Stat(tgtURL, false, keys); err == nil {
         if !tgtContent.Type.IsDir() {
             fatalIf(errInvalidArgument().Trace(tgtURL), "Target `"+tgtURL+"` is not a folder.")
         }
@@ -139,10 +139,10 @@ func checkCopySyntaxTypeC(srcURLs []string, tgtURL string, isRecursive bool) {
 }

 // checkCopySyntaxTypeD verifies if the source is a valid list of files and target is a valid folder.
-func checkCopySyntaxTypeD(srcURLs []string, tgtURL string) {
+func checkCopySyntaxTypeD(srcURLs []string, tgtURL string, keys map[string][]prefixSSEPair) {
     // Source can be anything: file, dir, dir...
     // Check target if it is a dir
-    if _, tgtContent, err := url2Stat(tgtURL); err == nil {
+    if _, tgtContent, err := url2Stat(tgtURL, false, keys); err == nil {
         if !tgtContent.Type.IsDir() {
             fatalIf(errInvalidArgument().Trace(tgtURL), "Target `"+tgtURL+"` is not a folder.")
         }
@@ -50,10 +50,10 @@ const (

 // guessCopyURLType guesses the type of clientURL. This approach all allows prepareURL
 // functions to accurately report failure causes.
-func guessCopyURLType(sourceURLs []string, targetURL string, isRecursive bool) (copyURLsType, *probe.Error) {
+func guessCopyURLType(sourceURLs []string, targetURL string, isRecursive bool, keys map[string][]prefixSSEPair) (copyURLsType, *probe.Error) {
     if len(sourceURLs) == 1 { // 1 Source, 1 Target
         sourceURL := sourceURLs[0]
-        _, sourceContent, err := url2Stat(sourceURL)
+        _, sourceContent, err := url2Stat(sourceURL, false, keys)
         if err != nil {
             return copyURLsTypeInvalid, err
         }
@@ -65,7 +65,7 @@ func guessCopyURLType(sourceURLs []string, targetURL string, isRecursive bool) (
         }

         // If target is a folder, it is Type B.
-        if isAliasURLDir(targetURL) {
+        if isAliasURLDir(targetURL, keys) {
             return copyURLsTypeB, nil
         }
         // else Type A.
@@ -73,7 +73,7 @@ func guessCopyURLType(sourceURLs []string, targetURL string, isRecursive bool) (
     }

     // Multiple source args and target is a folder. It is Type D.
-    if isAliasURLDir(targetURL) {
+    if isAliasURLDir(targetURL, keys) {
         return copyURLsTypeD, nil
     }

@@ -88,7 +88,7 @@ func prepareCopyURLsTypeA(sourceURL string, targetURL string, encKeyDB map[strin
     // Find alias and expanded clientURL.
     targetAlias, targetURL, _ := mustExpandAlias(targetURL)

-    _, sourceContent, err := url2Stat(sourceURL)
+    _, sourceContent, err := url2Stat(sourceURL, false, encKeyDB)
     if err != nil {
         // Source does not exist or insufficient privileges.
         return URLs{Error: err.Trace(sourceURL)}
@@ -134,7 +134,7 @@ func prepareCopyURLsTypeB(sourceURL string, targetURL string, encKeyDB map[strin
     // Find alias and expanded clientURL.
     targetAlias, targetURL, _ := mustExpandAlias(targetURL)

-    _, sourceContent, err := url2Stat(sourceURL)
+    _, sourceContent, err := url2Stat(sourceURL, false, encKeyDB)
     if err != nil {
         // Source does not exist or insufficient privileges.
         return URLs{Error: err.Trace(sourceURL)}
@@ -230,7 +230,7 @@ func prepareCopyURLs(sourceURLs []string, targetURL string, isRecursive bool, en
     copyURLsCh := make(chan URLs)
     go func(sourceURLs []string, targetURL string, copyURLsCh chan URLs, encKeyDB map[string][]prefixSSEPair) {
         defer close(copyURLsCh)
-        cpType, err := guessCopyURLType(sourceURLs, targetURL, isRecursive)
+        cpType, err := guessCopyURLType(sourceURLs, targetURL, isRecursive, encKeyDB)
         fatalIf(err.Trace(), "Unable to guess the type of copy operation.")

         switch cpType {
@@ -108,7 +108,7 @@ func (d diffMessage) JSON() string {
     return string(diffJSONBytes)
 }

-func checkDiffSyntax(ctx *cli.Context) {
+func checkDiffSyntax(ctx *cli.Context, encKeyDB map[string][]prefixSSEPair) {
     if len(ctx.Args()) != 2 {
         cli.ShowCommandHelpAndExit(ctx, "diff", 1) // last argument is exit code
     }
@@ -124,7 +124,7 @@ func checkDiffSyntax(ctx *cli.Context) {
     // Diff only works between two directories, verify them below.

     // Verify if firstURL is accessible.
-    _, firstContent, err := url2Stat(firstURL)
+    _, firstContent, err := url2Stat(firstURL, false, encKeyDB)
     if err != nil {
         fatalIf(err.Trace(firstURL), fmt.Sprintf("Unable to stat '%s'.", firstURL))
     }
@@ -135,7 +135,7 @@ func checkDiffSyntax(ctx *cli.Context) {
     }

     // Verify if secondURL is accessible.
-    _, secondContent, err := url2Stat(secondURL)
+    _, secondContent, err := url2Stat(secondURL, false, encKeyDB)
     if err != nil {
         fatalIf(err.Trace(secondURL), fmt.Sprintf("Unable to stat '%s'.", secondURL))
     }
@@ -189,9 +189,12 @@ func doDiffMain(firstURL, secondURL string) error {

 // mainDiff main for 'diff'.
 func mainDiff(ctx *cli.Context) error {
+    // Parse encryption keys per command.
+    encKeyDB, err := getEncKeys(ctx)
+    fatalIf(err, "Unable to parse encryption keys.")

     // check 'diff' cli arguments.
-    checkDiffSyntax(ctx)
+    checkDiffSyntax(ctx, encKeyDB)

     // Additional command specific theme customization.
     console.SetColor("DiffMessage", color.New(color.FgGreen, color.Bold))
@@ -158,7 +158,7 @@ EXAMPLES:
 }

 // checkFindSyntax - validate the passed arguments
-func checkFindSyntax(ctx *cli.Context) {
+func checkFindSyntax(ctx *cli.Context, encKeyDB map[string][]prefixSSEPair) {
     args := ctx.Args()
     if !args.Present() {
         args = []string{"./"} // No args just default to present directory.
@@ -174,7 +174,7 @@ func checkFindSyntax(ctx *cli.Context) {

     // Extract input URLs and validate.
     for _, url := range args {
-        _, _, err := url2Stat(url)
+        _, _, err := url2Stat(url, false, encKeyDB)
         if err != nil && !isURLPrefixExists(url, false) {
             // Bucket name empty is a valid error for 'find myminio' unless we are using watch, treat it as such.
             if _, ok := err.ToGoError().(BucketNameEmpty); ok && !ctx.Bool("watch") {
@@ -216,7 +216,11 @@ func mainFind(ctx *cli.Context) error {
     console.SetColor("Find", color.New(color.FgGreen, color.Bold))
     console.SetColor("FindExecErr", color.New(color.FgRed, color.Italic, color.Bold))

-    checkFindSyntax(ctx)
+    // Parse encryption keys per command.
+    encKeyDB, err := getEncKeys(ctx)
+    fatalIf(err, "Unable to parse encryption keys.")
+
+    checkFindSyntax(ctx, encKeyDB)

     args := ctx.Args()
     if !args.Present() {
@@ -35,6 +35,10 @@ var (
             Name:  "incomplete, I",
             Usage: "List incomplete uploads.",
         },
+        cli.StringFlag{
+            Name:  "encrypt-key",
+            Usage: "Encrypt/Decrypt (using server-side encryption)",
+        },
     }
 )
@@ -91,8 +95,12 @@ func checkListSyntax(ctx *cli.Context) {
     URLs := ctx.Args()
     isIncomplete := ctx.Bool("incomplete")

+    // Parse encryption keys per command.
+    encKeyDB, err := getEncKeys(ctx)
+    fatalIf(err, "Unable to parse encryption keys.")
+
     for _, url := range URLs {
-        _, _, err := url2Stat(url)
+        _, _, err := url2Stat(url, false, encKeyDB)
         if err != nil && !isURLPrefixExists(url, isIncomplete) {
             // Bucket name empty is a valid error for 'ls myminio',
             // treat it as such.
@@ -141,11 +141,11 @@ EXAMPLES:
    9. Mirror objects newer than 10 days from bucket test to a local folder.
      $ {{.HelpName}} --newer-than=10 s3/test ~/localfolder

-  10. Mirror a bucket older than 30 days from Amazon S3 bucket test to a local folder.
+  10. Mirror objects older than 30 days from Amazon S3 bucket test to a local folder.
      $ {{.HelpName}} --older-than=30 s3/test ~/test

   11. Mirror server encrypted objects from Minio cloud storage to a bucket on Amazon S3 cloud storage
-     $ {{.HelpName}} --encrypt-key "minio/photos=32byteslongsecretkeymustbegiven1,s3/archive=32byteslongsecretkeymustbegiven2" minio/photos/ s3/archive
+     $ {{.HelpName}} --encrypt-key "minio/photos=32byteslongsecretkeymustbegiven1,s3/archive=32byteslongsecretkeymustbegiven2" minio/photos/ s3/archive/

 `,
 }
@@ -752,9 +752,12 @@ func runMirror(srcURL, dstURL string, ctx *cli.Context, encKeyDB map[string][]pr

 // Main entry point for mirror command.
 func mainMirror(ctx *cli.Context) error {
+    // Parse encryption keys per command.
+    encKeyDB, err := getEncKeys(ctx)
+    fatalIf(err, "Unable to parse encryption keys.")

     // check 'mirror' cli arguments.
-    checkMirrorSyntax(ctx)
+    checkMirrorSyntax(ctx, encKeyDB)

     // Additional command specific theme customization.
     console.SetColor("Mirror", color.New(color.FgGreen, color.Bold))
@@ -763,13 +766,6 @@ func mainMirror(ctx *cli.Context) error {

     srcURL := args[0]
     tgtURL := args[1]
-    sseKeys := os.Getenv("MC_ENCRYPT_KEY")
-    if key := ctx.String("encrypt-key"); key != "" {
-        sseKeys = key
-    }
-
-    encKeyDB, err := parseAndValidateEncryptionKeys(sseKeys)
-    fatalIf(err, "Unable to parse encryption keys.")

     if err := runMirror(srcURL, tgtURL, ctx, encKeyDB); err != nil {
         errorIf(err.Trace(srcURL, tgtURL), "Unable to mirror.")
@@ -31,7 +31,7 @@ import (
 // mirror(d1..., d2) -> []mirror(d1/f, d2/d1/f)

 // checkMirrorSyntax(URLs []string)
-func checkMirrorSyntax(ctx *cli.Context) {
+func checkMirrorSyntax(ctx *cli.Context, encKeyDB map[string][]prefixSSEPair) {
     if len(ctx.Args()) != 2 {
         cli.ShowCommandHelpAndExit(ctx, "mirror", 1) // last argument is exit code.
     }
@@ -49,7 +49,7 @@ func checkMirrorSyntax(ctx *cli.Context) {

     /****** Generic rules *******/
     if !ctx.Bool("watch") {
-        _, srcContent, err := url2Stat(srcURL)
+        _, srcContent, err := url2Stat(srcURL, false, encKeyDB)
         // incomplete uploads are not necessary for copy operation, no need to verify for them.
         isIncomplete := false
         if err != nil && !isURLPrefixExists(srcURL, isIncomplete) {
@@ -104,23 +104,19 @@ func checkPipeSyntax(ctx *cli.Context) {

 // mainPipe is the main entry point for pipe command.
 func mainPipe(ctx *cli.Context) error {
+    // Parse encryption keys per command.
+    encKeyDB, err := getEncKeys(ctx)
+    fatalIf(err, "Unable to parse encryption keys.")

     // validate pipe input arguments.
     checkPipeSyntax(ctx)

     if len(ctx.Args()) == 0 {
-        err := pipe("", nil)
+        err = pipe("", nil)
         fatalIf(err.Trace("stdout"), "Unable to write to one or more targets.")
     } else {
         // extract URLs.
         URLs := ctx.Args()
-        sseKeys := os.Getenv("MC_ENCRYPT_KEY")
-        if key := ctx.String("encrypt-key"); key != "" {
-            sseKeys = key
-        }
-
-        encKeyDB, err := parseAndValidateEncryptionKeys(sseKeys)
-        fatalIf(err, "Unable to parse encryption keys.")
         err = pipe(URLs[0], encKeyDB)
         fatalIf(err.Trace(URLs[0]), "Unable to write to one or more targets.")
     }
@@ -146,19 +146,20 @@ func (r rmMessage) JSON() string {
 }

 // Validate command line arguments.
-func checkRmSyntax(ctx *cli.Context) {
+func checkRmSyntax(ctx *cli.Context, encKeyDB map[string][]prefixSSEPair) {
     // Set command flags from context.
     isForce := ctx.Bool("force")
     isRecursive := ctx.Bool("recursive")
     isStdin := ctx.Bool("stdin")
     isDangerous := ctx.Bool("dangerous")
     isNamespaceRemoval := false

     for _, url := range ctx.Args() {
         // clean path for aliases like s3/.
         //Note: UNC path using / works properly in go 1.9.2 even though it breaks the UNC specification.
         url = filepath.ToSlash(filepath.Clean(url))
         // namespace removal applies only for non FS. So filter out if passed url represents a directory
-        if !isAliasURLDir(url) {
+        if !isAliasURLDir(url, encKeyDB) {
             _, path := url2Alias(url)
             isNamespaceRemoval = (path == "")
             break
@@ -320,9 +321,12 @@ func removeRecursive(url string, isIncomplete bool, isFake bool, olderThan int,

 // main for rm command.
 func mainRm(ctx *cli.Context) error {
+    // Parse encryption keys per command.
+    encKeyDB, err := getEncKeys(ctx)
+    fatalIf(err, "Unable to parse encryption keys.")

     // check 'rm' cli arguments.
-    checkRmSyntax(ctx)
+    checkRmSyntax(ctx, encKeyDB)

     // rm specific flags.
     isIncomplete := ctx.Bool("incomplete")
@@ -331,28 +335,22 @@ func mainRm(ctx *cli.Context) error {
     isStdin := ctx.Bool("stdin")
     olderThan := ctx.Int("older-than")
     newerThan := ctx.Int("newer-than")
-    sseKeys := os.Getenv("MC_ENCRYPT_KEY")
-    if key := ctx.String("encrypt-key"); key != "" {
-        sseKeys = key
-    }
-
-    encKeyDB, perr := parseAndValidateEncryptionKeys(sseKeys)
-    fatalIf(perr, "Unable to parse encryption keys.")
     // Set color.
     console.SetColor("Remove", color.New(color.FgGreen, color.Bold))

     var rerr error
-    var err error
+    var e error
     // Support multiple targets.
     for _, url := range ctx.Args() {
         if isRecursive {
-            err = removeRecursive(url, isIncomplete, isFake, olderThan, newerThan, encKeyDB)
+            e = removeRecursive(url, isIncomplete, isFake, olderThan, newerThan, encKeyDB)
         } else {
-            err = removeSingle(url, isIncomplete, isFake, olderThan, newerThan, encKeyDB)
+            e = removeSingle(url, isIncomplete, isFake, olderThan, newerThan, encKeyDB)
         }

         if rerr == nil {
-            rerr = err
+            rerr = e
         }
     }

@@ -364,13 +362,13 @@ func mainRm(ctx *cli.Context) error {
     for scanner.Scan() {
         url := scanner.Text()
         if isRecursive {
-            err = removeRecursive(url, isIncomplete, isFake, olderThan, newerThan, encKeyDB)
+            e = removeRecursive(url, isIncomplete, isFake, olderThan, newerThan, encKeyDB)
         } else {
-            err = removeSingle(url, isIncomplete, isFake, olderThan, newerThan, encKeyDB)
+            e = removeSingle(url, isIncomplete, isFake, olderThan, newerThan, encKeyDB)
        }

         if rerr == nil {
-            rerr = err
+            rerr = e
         }
     }

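The rename from err to e in mainRm is a knock-on effect of the new getEncKeys call: encKeyDB, err := getEncKeys(ctx) already binds err as a *probe.Error in the function scope, so keeping the old var err error would not compile. A stripped-down sketch of the constraint (mainRmSketch is hypothetical; the removeSingle arguments are placeholders):

func mainRmSketch(ctx *cli.Context) error {
    encKeyDB, err := getEncKeys(ctx) // err is a *probe.Error from here on
    fatalIf(err, "Unable to parse encryption keys.")

    var rerr error
    var e error // was `var err error`; redeclaring err with a different type fails to compile
    for _, url := range ctx.Args() {
        e = removeSingle(url, false, false, 0, 0, encKeyDB)
        if rerr == nil {
            rerr = e
        }
    }
    return rerr
}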
@@ -67,7 +67,7 @@ EXAMPLES:
 }

 // checkShareDownloadSyntax - validate command-line args.
-func checkShareDownloadSyntax(ctx *cli.Context) {
+func checkShareDownloadSyntax(ctx *cli.Context, encKeyDB map[string][]prefixSSEPair) {
     args := ctx.Args()
     if !args.Present() {
         cli.ShowCommandHelpAndExit(ctx, "download", 1) // last argument is exit code.
@@ -94,7 +94,7 @@ func checkShareDownloadSyntax(ctx *cli.Context) {
     isRecursive := ctx.Bool("recursive")
     if !isRecursive {
         for _, url := range ctx.Args() {
-            _, _, err := url2Stat(url)
+            _, _, err := url2Stat(url, false, encKeyDB)
             if err != nil {
                 fatalIf(err.Trace(url), "Unable to stat `"+url+"`.")
             }
@@ -193,9 +193,12 @@ func doShareDownloadURL(targetURL string, isRecursive bool, expiry time.Duration

 // main for share download.
 func mainShareDownload(ctx *cli.Context) error {
+    // Parse encryption keys per command.
+    encKeyDB, err := getEncKeys(ctx)
+    fatalIf(err, "Unable to parse encryption keys.")

     // check input arguments.
-    checkShareDownloadSyntax(ctx)
+    checkShareDownloadSyntax(ctx, encKeyDB)

     // Initialize share config folder.
     initShareConfig()
@@ -17,7 +17,6 @@
 package cmd

 import (
-    "os"
     "strings"

     "github.com/fatih/color"
@@ -75,7 +74,7 @@ EXAMPLES:
 }

 // checkStatSyntax - validate all the passed arguments
-func checkStatSyntax(ctx *cli.Context) {
+func checkStatSyntax(ctx *cli.Context, encKeyDB map[string][]prefixSSEPair) {
     if !ctx.Args().Present() {
         cli.ShowCommandHelpAndExit(ctx, "stat", 1) // last argument is exit code
     }
@@ -89,8 +88,9 @@ func checkStatSyntax(ctx *cli.Context) {
     // extract URLs.
     URLs := ctx.Args()
     isIncomplete := false

     for _, url := range URLs {
-        _, _, err := url2Stat(url)
+        _, _, err := url2Stat(url, false, encKeyDB)
         if err != nil && !isURLPrefixExists(url, isIncomplete) {
             fatalIf(err.Trace(url), "Unable to stat `"+url+"`.")
         }
@@ -108,8 +108,12 @@ func mainStat(ctx *cli.Context) error {
     console.SetColor("EncryptionHeaders", color.New(color.FgWhite))
     console.SetColor("Metadata", color.New(color.FgWhite))

+    // Parse encryption keys per command.
+    encKeyDB, err := getEncKeys(ctx)
+    fatalIf(err, "Unable to parse encryption keys.")
+
     // check 'stat' cli arguments.
-    checkStatSyntax(ctx)
+    checkStatSyntax(ctx, encKeyDB)

     // Set command flags from context.
     isRecursive := ctx.Bool("recursive")
@@ -120,14 +124,6 @@ func mainStat(ctx *cli.Context) error {
         args = []string{"."}
     }

-    sseKeys := os.Getenv("MC_ENCRYPT_KEY")
-    if key := ctx.String("encrypt-key"); key != "" {
-        sseKeys = key
-    }
-
-    encKeyDB, err := parseAndValidateEncryptionKeys(sseKeys)
-    fatalIf(err, "Unable to parse encryption keys.")
-
     var cErr error
     for _, targetURL := range args {
         var clnt Client
@@ -144,7 +144,7 @@ func doStat(clnt Client, isRecursive bool, targetAlias, targetURL string, encKey
             continue
         }
         url := targetAlias + getKey(content)
-        _, stat, err := url2StatWithMetadata(url, true, encKeyDB)
+        _, stat, err := url2Stat(url, true, encKeyDB)
         if err != nil {
             stat = content
         }
vendor/github.com/minio/minio-go/api-compose-object.go (generated, vendored, 4 lines changed)
@@ -455,7 +455,7 @@ func (c Client) ComposeObjectWithProgress(dst DestinationInfo, srcs []SourceInfo
     for i, src := range srcs {
         h := src.Headers
         if src.encryption != nil {
-            src.encryption.Marshal(h)
+            encrypt.SSECopy(src.encryption).Marshal(h)
         }
         // Add destination encryption headers
         if dst.encryption != nil {
@@ -480,7 +480,7 @@ func (c Client) ComposeObjectWithProgress(dst DestinationInfo, srcs []SourceInfo
                 return err
             }
             if progress != nil {
-                io.CopyN(ioutil.Discard, progress, start+end-1)
+                io.CopyN(ioutil.Discard, progress, end-start+1)
             }
             objParts = append(objParts, complPart)
             partIndex++
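Two vendored minio-go fixes ride along with the dependency bump. The copy source of a compose operation must advertise its SSE-C key through the copy-source headers, which is what wrapping it in encrypt.SSECopy does, and the progress reader for a part covering byte offsets start through end inclusive must consume end-start+1 bytes, not start+end-1. The fragment below is a hedged illustration of the source/destination header split using the public minio-go encrypt package (key values are placeholders; imports assumed: net/http, github.com/minio/minio-go/pkg/encrypt):

// Illustrative only: SSE-C headers for a server-side copy/compose.
func composeSSEHeadersSketch() (http.Header, error) {
    srcSSE, err := encrypt.NewSSEC([]byte("32byteslongsecretkeymustbegiven1"))
    if err != nil {
        return nil, err
    }
    dstSSE, err := encrypt.NewSSEC([]byte("32byteslongsecretkeymustbegiven2"))
    if err != nil {
        return nil, err
    }
    h := make(http.Header)
    encrypt.SSECopy(srcSSE).Marshal(h) // copy-source SSE-C headers for the source object
    dstSSE.Marshal(h)                  // regular SSE-C headers for the destination
    return h, nil
}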
vendor/github.com/minio/minio-go/api.go (generated, vendored, 2 lines changed)
@@ -99,7 +99,7 @@ type Options struct {
 // Global constants.
 const (
     libraryName    = "minio-go"
-    libraryVersion = "v6.0.5"
+    libraryVersion = "v6.0.6"
 )

 // User Agent should always following the below style.
vendor/vendor.json (vendored, 30 lines changed)
@@ -56,46 +56,46 @@
             "revisionTime": "2015-10-24T22:24:27-07:00"
         },
         {
-            "checksumSHA1": "qmCEhMpDtl8rzdoxAlK9pz/rkek=",
+            "checksumSHA1": "PiGm5X1m6ZVCHxE6tua2tlCYg5I=",
             "path": "github.com/minio/minio-go",
-            "revision": "70799fe8dae6ecfb6c7d7e9e048fce27f23a1992",
-            "revisionTime": "2018-07-05T14:57:19Z"
+            "revision": "f44ca5490afab26b1b2988eb870b20c5183817b0",
+            "revisionTime": "2018-07-11T12:25:12Z"
         },
         {
             "checksumSHA1": "Qsj+6JPmJ8R5rFNQSHqRb8xAwOw=",
             "path": "github.com/minio/minio-go/pkg/credentials",
-            "revision": "70799fe8dae6ecfb6c7d7e9e048fce27f23a1992",
-            "revisionTime": "2018-07-05T14:57:19Z"
+            "revision": "f44ca5490afab26b1b2988eb870b20c5183817b0",
+            "revisionTime": "2018-07-11T12:25:12Z"
         },
         {
             "checksumSHA1": "Md5pOKYfoKtrG7xNvs2FtiDPfDc=",
             "path": "github.com/minio/minio-go/pkg/encrypt",
-            "revision": "70799fe8dae6ecfb6c7d7e9e048fce27f23a1992",
-            "revisionTime": "2018-07-05T14:57:19Z"
+            "revision": "f44ca5490afab26b1b2988eb870b20c5183817b0",
+            "revisionTime": "2018-07-11T12:25:12Z"
         },
         {
             "checksumSHA1": "6D/qMFV+e39L+6aeT+Seq1guohM=",
             "path": "github.com/minio/minio-go/pkg/policy",
-            "revision": "70799fe8dae6ecfb6c7d7e9e048fce27f23a1992",
-            "revisionTime": "2018-07-05T14:57:19Z"
+            "revision": "f44ca5490afab26b1b2988eb870b20c5183817b0",
+            "revisionTime": "2018-07-11T12:25:12Z"
         },
         {
             "checksumSHA1": "bbWjcrOQsV57qK+BSsrNAsI+Q/o=",
             "path": "github.com/minio/minio-go/pkg/s3signer",
-            "revision": "70799fe8dae6ecfb6c7d7e9e048fce27f23a1992",
-            "revisionTime": "2018-07-05T14:57:19Z"
+            "revision": "f44ca5490afab26b1b2988eb870b20c5183817b0",
+            "revisionTime": "2018-07-11T12:25:12Z"
         },
         {
             "checksumSHA1": "xrJThFwwkVrJdwd5iYFHqfx4wRY=",
             "path": "github.com/minio/minio-go/pkg/s3utils",
-            "revision": "70799fe8dae6ecfb6c7d7e9e048fce27f23a1992",
-            "revisionTime": "2018-07-05T14:57:19Z"
+            "revision": "f44ca5490afab26b1b2988eb870b20c5183817b0",
+            "revisionTime": "2018-07-11T12:25:12Z"
         },
         {
             "checksumSHA1": "Wt8ej+rZXTdNBR9Xyw1eGo3Iq5o=",
             "path": "github.com/minio/minio-go/pkg/set",
-            "revision": "70799fe8dae6ecfb6c7d7e9e048fce27f23a1992",
-            "revisionTime": "2018-07-05T14:57:19Z"
+            "revision": "f44ca5490afab26b1b2988eb870b20c5183817b0",
+            "revisionTime": "2018-07-11T12:25:12Z"
         },
         {
             "checksumSHA1": "MEC+K9aTG+8tfPjnJ4qj2Y+kc4s=",