mirror of https://github.com/minio/mc.git synced 2025-11-13 12:22:45 +03:00

re-vendorize probe and simplify typed errors

This commit is contained in:
Anand Babu (AB) Periasamy
2015-08-24 04:06:28 -07:00
parent 0ee26b9f6c
commit 40ff9f1033
19 changed files with 112 additions and 120 deletions
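In short, every throwaway probe.NewError(errors.New("")) call site is replaced by shared, pre-wrapped probe errors that are only traced where they occur. A minimal sketch of the before/after pattern, assembled from the hunks below (the exact fatalIf signature is assumed):

// Before: wrap an empty ad-hoc error at every call site.
fatalIf(probe.NewError(errors.New("")), "Alias or URL cannot be empty.")

// After: declare the sentinel once (see errors.go below) and only trace it where it is raised.
var errDummy = probe.NewError(errors.New("")).Untrace()
fatalIf(errDummy.Trace(), "Alias or URL cannot be empty.")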

View File

@@ -17,8 +17,6 @@
package main
import (
-"errors"
"github.com/minio/cli"
"github.com/minio/mc/pkg/client"
"github.com/minio/minio/pkg/probe"
@@ -66,7 +64,7 @@ func mainAccess(ctx *cli.Context) {
config := mustGetMcConfig()
acl := bucketACL(ctx.Args().First())
if !acl.isValidBucketACL() {
-fatalIf(probe.NewError(errors.New("")),
+fatalIf(errDummy.Trace(),
"Unrecognized permission "+acl.String()+". Allowed values are [private, public, readonly].")
}
for _, arg := range ctx.Args().Tail() {

View File

@@ -74,7 +74,7 @@ func aliasExpand(aliasedURL string, aliases map[string]string) (string, *probe.E
trimmedURL := expandedURL + "/" + strings.TrimPrefix(strings.TrimPrefix(splits[1], "/"), "\\")
u, err := client.Parse(trimmedURL)
if err != nil {
-return aliasedURL, probe.NewError(errInvalidURL{URL: aliasedURL})
+return aliasedURL, probe.NewError(eInvalidURL{URL: aliasedURL})
}
return u.String(), nil
}

View File

@@ -165,7 +165,7 @@ func getNewClient(urlStr string, auth hostConfig) (client.Client, *probe.Error)
case client.Filesystem:
return fs.New(urlStr)
}
-return nil, probe.NewError(errInitClient{url: urlStr})
+return nil, probe.NewError(eInitClient{URL: urlStr})
}
// url2Stat - Returns client, config and its stat Content from the URL
@@ -199,7 +199,7 @@ func isValidURL(url string) bool {
func url2Client(url string) (client.Client, *probe.Error) {
if !isValidURL(url) {
-return nil, probe.NewError(errInvalidURL{URL: url})
+return nil, probe.NewError(eInvalidURL{URL: url})
}
urlconfig, err := getHostConfig(url)
if err != nil {

View File

@@ -18,7 +18,6 @@ package main
import (
"encoding/json"
-"errors"
"fmt"
"strings"
@@ -96,7 +95,7 @@ func mainConfig(ctx *cli.Context) {
arg := ctx.Args().First()
tailArgs := ctx.Args().Tail()
if len(tailArgs) > 2 {
-fatalIf(probe.NewError(errors.New("")), "Incorrect number of arguments to config command. Please read mc config help")
+fatalIf(errDummy.Trace(), "Incorrect number of arguments to config command. Please read mc config help")
}
switch arg {
@@ -130,7 +129,7 @@ func mainConfig(ctx *cli.Context) {
// addAlias - add new aliases
func addAlias(alias, url string) {
if alias == "" || url == "" {
-fatalIf(probe.NewError(errors.New("")), "Alias or URL cannot be empty.")
+fatalIf(errDummy.Trace(), "Alias or URL cannot be empty.")
}
conf := newConfigV2()
config, err := quick.New(conf)
@@ -141,18 +140,18 @@ func addAlias(alias, url string) {
url = strings.TrimSuffix(url, "/")
if !strings.HasPrefix(url, "http") {
-fatalIf(probe.NewError(errors.New("")), fmt.Sprintf("Invalid alias URL %s. Valid examples are: http://s3.amazonaws.com, https://yourbucket.example.com.", url))
+fatalIf(errDummy.Trace(), fmt.Sprintf("Invalid alias URL %s. Valid examples are: http://s3.amazonaws.com, https://yourbucket.example.com.", url))
}
if isAliasReserved(alias) {
-fatalIf(probe.NewError(errors.New("")), fmt.Sprintf("Cannot use a reserved name %s as an alias. Following are reserved names: [help, private, readonly, public, authenticated].", alias))
+fatalIf(errDummy.Trace(), fmt.Sprintf("Cannot use a reserved name %s as an alias. Following are reserved names: [help, private, readonly, public, authenticated].", alias))
}
if !isValidAliasName(alias) {
-fatalIf(probe.NewError(errors.New("")), fmt.Sprintf("Alias name %s is invalid, valid examples are: mybucket, Area51, Grand-Nagus", alias))
+fatalIf(errDummy.Trace(), fmt.Sprintf("Alias name %s is invalid, valid examples are: mybucket, Area51, Grand-Nagus", alias))
}
// convert interface{} back to its original struct
newConf := config.Data().(*configV2)
if oldURL, ok := newConf.Aliases[alias]; ok {
-fatalIf(probe.NewError(errors.New("")), fmt.Sprintf("Alias %s already exists for %s.", alias, oldURL))
+fatalIf(errDummy.Trace(), fmt.Sprintf("Alias %s already exists for %s.", alias, oldURL))
}
newConf.Aliases[alias] = url
newConfig, err := quick.New(newConf)

View File

@@ -107,7 +107,7 @@ func mustGetMcConfigPath() string {
// getMcConfig - reads configuration file and returns config
func getMcConfig() (*configV2, *probe.Error) {
if !isMcConfigExists() {
-return nil, probe.NewError(errInvalidArgument)
+return nil, errInvalidArgument.Trace()
}
configFile, err := getMcConfigPath()
@@ -158,7 +158,7 @@ func isMcConfigExists() bool {
// writeConfig - write configuration file
func writeConfig(config quick.Config) *probe.Error {
if config == nil {
-return probe.NewError(errInvalidArgument)
+return errInvalidArgument.Trace()
}
err := createMcConfigDir()
if err != nil {

View File

@@ -17,7 +17,6 @@
package main
import (
-"errors"
"fmt"
"path/filepath"
"strings"
@@ -72,7 +71,7 @@ func checkCopySyntax(ctx *cli.Context) {
/****** Generic rules *******/
// Recursive URLs are not allowed in target.
if isURLRecursive(tgtURL) {
-fatalIf(probe.NewError(errors.New("")), fmt.Sprintf("Recursive option is not supported for target %s argument.", tgtURL))
+fatalIf(errDummy.Trace(), fmt.Sprintf("Recursive option is not supported for target %s argument.", tgtURL))
}
// scope locally
{
@@ -82,7 +81,7 @@ func checkCopySyntax(ctx *cli.Context) {
}
if url.Host != "" {
if url.Path == string(url.Separator) {
-fatalIf(probe.NewError(errInvalidArgument), fmt.Sprintf("Target %s does not contain bucket name.", tgtURL))
+fatalIf(errInvalidArgument.Trace(), fmt.Sprintf("Target %s does not contain bucket name.", tgtURL))
}
}
}
@@ -96,14 +95,14 @@ func checkCopySyntax(ctx *cli.Context) {
case copyURLsTypeD: // File | Folder... -> Folder.
checkCopySyntaxTypeD(srcURLs, tgtURL)
default:
-fatalIf(probe.NewError(errInvalidArgument), "Invalid arguments to copy command.")
+fatalIf(errInvalidArgument.Trace(), "Invalid arguments to copy command.")
}
}
// checkCopySyntaxTypeA verifies if the source and target are valid file arguments.
func checkCopySyntaxTypeA(srcURLs []string, tgtURL string) {
if len(srcURLs) != 1 {
-fatalIf(probe.NewError(errInvalidArgument), "Invalid number of source arguments to copy command.")
+fatalIf(errInvalidArgument.Trace(), "Invalid number of source arguments to copy command.")
}
srcURL := srcURLs[0]
_, srcContent, err := url2Stat(srcURL)
@@ -111,34 +110,34 @@ func checkCopySyntaxTypeA(srcURLs []string, tgtURL string) {
fatalIf(err.Trace(srcURL), "Unable to stat source "+srcURL+".")
if srcContent.Type.IsDir() {
-fatalIf(probe.NewError(errInvalidArgument), fmt.Sprintf("Source %s is a folder. Use %s... argument to copy this folder and its contents recursively.", srcURL, srcURL))
+fatalIf(errInvalidArgument.Trace(), fmt.Sprintf("Source %s is a folder. Use %s... argument to copy this folder and its contents recursively.", srcURL, srcURL))
}
if !srcContent.Type.IsRegular() {
-fatalIf(probe.NewError(errInvalidArgument), "Source "+srcURL+" is not a file.")
+fatalIf(errInvalidArgument.Trace(), "Source "+srcURL+" is not a file.")
}
}
// checkCopySyntaxTypeB verifies if the source is a valid file and target is a valid dir.
func checkCopySyntaxTypeB(srcURLs []string, tgtURL string) {
if len(srcURLs) != 1 {
-fatalIf(probe.NewError(errInvalidArgument), "Invalid number of source arguments to copy command.")
+fatalIf(errInvalidArgument.Trace(), "Invalid number of source arguments to copy command.")
}
srcURL := srcURLs[0]
_, srcContent, err := url2Stat(srcURL)
fatalIf(err.Trace(srcURL), "Unable to stat source "+srcURL+".")
if srcContent.Type.IsDir() {
-fatalIf(probe.NewError(errInvalidArgument), fmt.Sprintf("Source %s is a folder. Use %s... argument to copy this folder and its contents recursively.", srcURL, srcURL))
+fatalIf(errInvalidArgument.Trace(), fmt.Sprintf("Source %s is a folder. Use %s... argument to copy this folder and its contents recursively.", srcURL, srcURL))
}
if !srcContent.Type.IsRegular() {
-fatalIf(probe.NewError(errInvalidArgument), "Source "+srcURL+" is not a file.")
+fatalIf(errInvalidArgument.Trace(), "Source "+srcURL+" is not a file.")
}
_, tgtContent, err := url2Stat(tgtURL)
// Target exist?.
if err == nil {
if !tgtContent.Type.IsDir() {
-fatalIf(probe.NewError(errInvalidArgument), "Target "+tgtURL+" is not a folder.")
+fatalIf(errInvalidArgument.Trace(), "Target "+tgtURL+" is not a folder.")
}
}
}
@@ -146,7 +145,7 @@ func checkCopySyntaxTypeB(srcURLs []string, tgtURL string) {
// checkCopySyntaxTypeC verifies if the source is a valid recursive dir and target is a valid dir.
func checkCopySyntaxTypeC(srcURLs []string, tgtURL string) {
if len(srcURLs) != 1 {
-fatalIf(probe.NewError(errInvalidArgument), "Invalid number of source arguments to copy command.")
+fatalIf(errInvalidArgument.Trace(), "Invalid number of source arguments to copy command.")
}
srcURL := srcURLs[0]
srcURL = stripRecursiveURL(srcURL)
@@ -154,13 +153,13 @@ func checkCopySyntaxTypeC(srcURLs []string, tgtURL string) {
fatalIf(err.Trace(srcURL), "Unable to stat source "+srcURL+".")
if srcContent.Type.IsRegular() { // Ellipses is supported only for folders.
-fatalIf(probe.NewError(errInvalidArgument), "Source "+srcURL+" is not a folder.")
+fatalIf(errInvalidArgument.Trace(), "Source "+srcURL+" is not a folder.")
}
_, tgtContent, err := url2Stat(tgtURL)
// Target exist?.
if err == nil {
if !tgtContent.Type.IsDir() {
-fatalIf(probe.NewError(errInvalidArgument), "Target "+tgtURL+" is not a folder.")
+fatalIf(errInvalidArgument.Trace(), "Target "+tgtURL+" is not a folder.")
}
}
}
@@ -174,17 +173,17 @@ func checkCopySyntaxTypeD(srcURLs []string, tgtURL string) {
fatalIf(err.Trace(srcURL), "Unable to stat source "+srcURL+".")
if !srcContent.Type.IsDir() { // Ellipses is supported only for folders.
-fatalIf(probe.NewError(errInvalidArgument), "Source "+srcURL+" is not a folder.")
+fatalIf(errInvalidArgument.Trace(), "Source "+srcURL+" is not a folder.")
}
} else { // Regular URL.
_, srcContent, err := url2Stat(srcURL)
fatalIf(err.Trace(srcURL), "Unable to stat source "+srcURL+".")
if srcContent.Type.IsDir() {
-fatalIf(probe.NewError(errInvalidArgument), fmt.Sprintf("Source %s is a folder. Use %s... argument to copy this folder and its contents recursively.", srcURL, srcURL))
+fatalIf(errInvalidArgument.Trace(), fmt.Sprintf("Source %s is a folder. Use %s... argument to copy this folder and its contents recursively.", srcURL, srcURL))
}
if !srcContent.Type.IsRegular() {
-fatalIf(probe.NewError(errInvalidArgument), "Source "+srcURL+" is not a file.")
+fatalIf(errInvalidArgument.Trace(), "Source "+srcURL+" is not a file.")
}
}
}
@@ -192,7 +191,7 @@ func checkCopySyntaxTypeD(srcURLs []string, tgtURL string) {
// Target exist?.
if err == nil {
if !tgtContent.Type.IsDir() {
-fatalIf(probe.NewError(errInvalidArgument), "Target "+tgtURL+" is not a folder.")
+fatalIf(errInvalidArgument.Trace(), "Target "+tgtURL+" is not a folder.")
}
}
}
@@ -237,7 +236,7 @@ func prepareCopyURLsTypeA(sourceURL string, targetURL string) copyURLs {
}
if !sourceContent.Type.IsRegular() {
// Source is not a regular file
-return copyURLs{Error: probe.NewError(errInvalidSource{URL: sourceURL})}
+return copyURLs{Error: probe.NewError(eInvalidSource{URL: sourceURL})}
}
// All OK.. We can proceed. Type A
sourceContent.Name = sourceURL
@@ -254,19 +253,19 @@ func prepareCopyURLsTypeB(sourceURL string, targetURL string) copyURLs {
}
if !sourceContent.Type.IsRegular() {
// Source is not a regular file.
-return copyURLs{Error: probe.NewError(errInvalidSource{URL: sourceURL})}
+return copyURLs{Error: probe.NewError(eInvalidSource{URL: sourceURL})}
}
// All OK.. We can proceed. Type B: source is a file, target is a folder and exists.
{
sourceURLParse, err := client.Parse(sourceURL)
if err != nil {
-return copyURLs{Error: probe.NewError(errInvalidSource{URL: sourceURL})}
+return copyURLs{Error: probe.NewError(eInvalidSource{URL: sourceURL})}
}
targetURLParse, err := client.Parse(targetURL)
if err != nil {
-return copyURLs{Error: probe.NewError(errInvalidTarget{URL: targetURL})}
+return copyURLs{Error: probe.NewError(eInvalidTarget{URL: targetURL})}
}
targetURLParse.Path = filepath.Join(targetURLParse.Path, filepath.Base(sourceURLParse.Path))
return prepareCopyURLsTypeA(sourceURL, targetURLParse.String())
@@ -281,7 +280,7 @@ func prepareCopyURLsTypeC(sourceURL, targetURL string) <-chan copyURLs {
defer close(copyURLsCh)
if !isURLRecursive(sourceURL) {
// Source is not of recursive type.
-copyURLsCh <- copyURLs{Error: probe.NewError(errSourceNotRecursive{URL: sourceURL})}
+copyURLsCh <- copyURLs{Error: probe.NewError(eSourceNotRecursive{URL: sourceURL})}
return
}
@@ -296,7 +295,7 @@ func prepareCopyURLsTypeC(sourceURL, targetURL string) <-chan copyURLs {
if !sourceContent.Type.IsDir() {
// Source is not a dir.
-copyURLsCh <- copyURLs{Error: probe.NewError(errSourceIsNotDir{URL: sourceURL})}
+copyURLsCh <- copyURLs{Error: probe.NewError(eSourceIsNotDir{URL: sourceURL})}
return
}
@@ -315,13 +314,13 @@ func prepareCopyURLsTypeC(sourceURL, targetURL string) <-chan copyURLs {
// All OK.. We can proceed. Type B: source is a file, target is a folder and exists.
sourceURLParse, err := client.Parse(sourceURL)
if err != nil {
-copyURLsCh <- copyURLs{Error: probe.NewError(errInvalidSource{URL: sourceURL})}
+copyURLsCh <- copyURLs{Error: probe.NewError(eInvalidSource{URL: sourceURL})}
continue
}
targetURLParse, err := client.Parse(targetURL)
if err != nil {
-copyURLsCh <- copyURLs{Error: probe.NewError(errInvalidTarget{URL: targetURL})}
+copyURLsCh <- copyURLs{Error: probe.NewError(eInvalidTarget{URL: targetURL})}
continue
}
@@ -331,7 +330,7 @@ func prepareCopyURLsTypeC(sourceURL, targetURL string) <-chan copyURLs {
sourceContentURL := sourceURLDelimited + sourceContentName
sourceContentParse, err := client.Parse(sourceContentURL)
if err != nil {
-copyURLsCh <- copyURLs{Error: probe.NewError(errInvalidSource{URL: sourceContentName})}
+copyURLsCh <- copyURLs{Error: probe.NewError(eInvalidSource{URL: sourceContentName})}
continue
}
@@ -353,7 +352,7 @@ func prepareCopyURLsTypeD(sourceURLs []string, targetURL string) <-chan copyURLs
if sourceURLs == nil {
// Source list is empty.
-copyURLsCh <- copyURLs{Error: probe.NewError(errSourceListEmpty)}
+copyURLsCh <- copyURLs{Error: errSourceListEmpty.Trace()}
return
}
@@ -391,7 +390,7 @@ func prepareCopyURLs(sourceURLs []string, targetURL string) <-chan copyURLs {
copyURLsCh <- cURLs
}
default:
-copyURLsCh <- copyURLs{Error: probe.NewError(errInvalidArgument)}
+copyURLsCh <- copyURLs{Error: errInvalidArgument.Trace()}
}
}(sourceURLs, targetURL, copyURLsCh)

View File

@@ -74,7 +74,7 @@ func mainDiff(ctx *cli.Context) {
fatalIf(err.Trace(secondArg), "Unable to parse second argument "+secondArg+".")
if isURLRecursive(secondURL) {
-fatalIf(probe.NewError(errInvalidArgument), "Second argument "+secondURL+" cannot be recursive.")
+fatalIf(errInvalidArgument.Trace(), "Second argument "+secondURL+" cannot be recursive.")
}
newFirstURL := stripRecursiveURL(firstURL)
for diff := range doDiffCmd(newFirstURL, secondURL, isURLRecursive(firstURL)) {

diff.go
View File

@@ -18,7 +18,6 @@ package main
import (
"encoding/json"
-"errors"
"os"
"path/filepath"
"strings"
@@ -65,7 +64,7 @@ func (d DiffMessage) String() string {
case "size":
msg = "" + d.FirstURL + "" + " and " + "" + d.SecondURL + "" + " - differ in size."
default:
-fatalIf(probe.NewError(errors.New("")), "Unhandled difference between "+d.FirstURL+" and "+d.SecondURL+".")
+fatalIf(errDummy.Trace(), "Unhandled difference between "+d.FirstURL+" and "+d.SecondURL+".")
}
return console.Colorize("Diff", msg)
}
@@ -77,13 +76,13 @@ func (d DiffMessage) String() string {
// urlJoinPath Join a path to existing URL
func urlJoinPath(url1, url2 string) (string, *probe.Error) {
-u1, err := client.Parse(url1)
-if err != nil {
-return "", probe.NewError(err)
+u1, e := client.Parse(url1)
+if e != nil {
+return "", probe.NewError(e)
}
-u2, err := client.Parse(url2)
-if err != nil {
-return "", probe.NewError(err)
+u2, e := client.Parse(url2)
+if e != nil {
+return "", probe.NewError(e)
}
u1.Path = filepath.Join(u1.Path, u2.Path)
return u1.String(), nil
@@ -173,7 +172,7 @@ func doDiffObjects(firstURL, secondURL string, ch chan DiffMessage) {
}
default:
ch <- DiffMessage{
-Error: probe.NewError(errNotAnObject{url: firstURL}),
+Error: probe.NewError(eNotAnObject{URL: firstURL}),
}
return
}

View File

@@ -50,12 +50,12 @@ func getHostConfig(URL string) (hostConfig, *probe.Error) {
for globURL, hostCfg := range config.Hosts {
match, err := filepath.Match(globURL, url.Host)
if err != nil {
-return hostConfig{}, probe.NewError(errInvalidGlobURL{glob: globURL, request: URL})
+return hostConfig{}, probe.NewError(eInvalidGlobURL{glob: globURL, request: URL})
}
if match {
return hostCfg, nil
}
}
}
-return hostConfig{}, probe.NewError(errNoMatchingHost{url: URL})
+return hostConfig{}, probe.NewError(eNoMatchingHost{URL: URL})
}

View File

@@ -17,7 +17,6 @@
package main
import (
-"errors"
"fmt"
"net/http"
"os"
@@ -150,7 +149,7 @@ VERSION:
{{end}}
`
app.CommandNotFound = func(ctx *cli.Context, command string) {
-fatalIf(probe.NewError(errors.New("")), fmt.Sprintf("Command %s not found.", command))
+fatalIf(errDummy.Trace(), fmt.Sprintf("Command %s not found.", command))
}
return app
}

View File

@@ -18,7 +18,6 @@ package main
import (
"encoding/json"
-"errors"
"fmt"
"path/filepath"
"strings"
@@ -84,7 +83,7 @@ func checkMirrorSyntax(ctx *cli.Context) {
/****** Generic rules *******/
// Source cannot be a folder (except when recursive)
if !isURLRecursive(srcURL) {
-fatalIf(probe.NewError(errInvalidArgument), fmt.Sprintf("Source %s is not recursive. Use %s... as argument to mirror recursively.", srcURL, srcURL))
+fatalIf(errInvalidArgument.Trace(), fmt.Sprintf("Source %s is not recursive. Use %s... as argument to mirror recursively.", srcURL, srcURL))
}
// Recursive source URL.
newSrcURL := stripRecursiveURL(srcURL)
@@ -92,17 +91,17 @@ func checkMirrorSyntax(ctx *cli.Context) {
fatalIf(err.Trace(srcURL), "Unable to stat source "+newSrcURL+".")
if srcContent.Type.IsRegular() { // Ellipses is supported only for folders.
-fatalIf(probe.NewError(errInvalidArgument), "Source "+srcURL+" is not a folder.")
+fatalIf(errInvalidArgument.Trace(), "Source "+srcURL+" is not a folder.")
}
if len(tgtURLs) == 0 && tgtURLs == nil {
-fatalIf(probe.NewError(errInvalidArgument), "Invalid number of target arguments to mirror command.")
+fatalIf(errInvalidArgument.Trace(), "Invalid number of target arguments to mirror command.")
}
for _, tgtURL := range tgtURLs {
// Recursive URLs are not allowed in target.
if isURLRecursive(tgtURL) {
-fatalIf(probe.NewError(errors.New("")), fmt.Sprintf("Recursive option is not supported for target %s argument.", tgtURL))
+fatalIf(errDummy.Trace(), fmt.Sprintf("Recursive option is not supported for target %s argument.", tgtURL))
}
url, e := client.Parse(tgtURL)
@@ -110,14 +109,14 @@ func checkMirrorSyntax(ctx *cli.Context) {
if url.Host != "" {
if url.Path == string(url.Separator) {
-fatalIf(probe.NewError(errInvalidArgument), fmt.Sprintf("Target %s does not contain bucket name.", tgtURL))
+fatalIf(errInvalidArgument.Trace(), fmt.Sprintf("Target %s does not contain bucket name.", tgtURL))
}
}
_, content, err := url2Stat(tgtURL)
fatalIf(err.Trace(tgtURL), "Unable to stat target "+tgtURL+".")
if !content.Type.IsDir() {
-fatalIf(probe.NewError(errInvalidArgument), "Target "+tgtURL+" is not a folder.")
+fatalIf(errInvalidArgument.Trace(), "Target "+tgtURL+" is not a folder.")
}
}
}
@@ -216,7 +215,7 @@ func prepareMirrorURLs(sourceURL string, targetURLs []string) <-chan mirrorURLs
}
// if one of the targets is not dir exit
if !targetContent.Type.IsDir() {
-mirrorURLsCh <- mirrorURLs{Error: probe.NewError(errInvalidTarget{URL: targetURL})}
+mirrorURLsCh <- mirrorURLs{Error: probe.NewError(eInvalidTarget{URL: targetURL})}
return
}
// special case, be extremely careful before changing this behavior - will lead to data loss

View File

@@ -17,7 +17,6 @@
package main
import (
-"errors"
"os"
"sort"
"strings"
@@ -93,7 +92,7 @@ func clearSession(sid string) {
}
if !isSession(sid) {
-fatalIf(probe.NewError(errors.New("")), "Session "+sid+" not found.")
+fatalIf(errDummy.Trace(), "Session "+sid+" not found.")
}
session, err := loadSessionV2(sid)
@@ -138,7 +137,7 @@ func mainSession(ctx *cli.Context) {
sid := strings.TrimSpace(ctx.Args().Tail().First())
if !isSession(sid) {
-fatalIf(probe.NewError(errors.New("")), "Session "+sid+" not found.")
+fatalIf(errDummy.Trace(), "Session "+sid+" not found.")
}
s, err := loadSessionV2(sid)

View File

@@ -209,7 +209,7 @@ func (s *sessionV2) Delete() *probe.Error {
// loadSession - reads session file if exists and re-initiates internal variables
func loadSessionV2(sid string) (*sessionV2, *probe.Error) {
if !isSessionDirExists() {
-return nil, probe.NewError(errInvalidArgument)
+return nil, errInvalidArgument.Trace()
}
sessionFile, err := getSessionFile(sid)
if err != nil {
@@ -255,7 +255,7 @@ func isCopiedFactory(lastCopied string) func(string) bool {
copied := true // closure
return func(sourceURL string) bool {
if sourceURL == "" {
-fatalIf(probe.NewError(errInvalidArgument), "Empty source argument passed.")
+fatalIf(errInvalidArgument.Trace(), "Empty source argument passed.")
}
if lastCopied == "" {
return false

View File

@@ -16,73 +16,73 @@
package main
-import "errors"
+import (
+"errors"
+"github.com/minio/minio/pkg/probe"
+)
-type errNotAnObject struct {
-url string
-}
-func (e errNotAnObject) Error() string {
-return "URL: " + e.url + " not an object"
-}
-var errInvalidArgument = errors.New("Invalid arguments provided, cannot proceed.")
+var (
+errDummy = probe.NewError(errors.New("")).Untrace()
+errInvalidArgument = probe.NewError(errors.New("Invalid arguments provided, cannot proceed.")).Untrace()
+errSourceListEmpty = probe.NewError(errors.New("Source argument list is empty.")).Untrace()
+)
-type errInvalidGlobURL struct {
+type eInvalidGlobURL struct {
glob string
request string
}
-func (e errInvalidGlobURL) Error() string {
+func (e eInvalidGlobURL) Error() string {
return "Error reading glob URL " + e.glob + " while comparing with " + e.request + "."
}
-type errNoMatchingHost struct {
-url string
-}
-func (e errNoMatchingHost) Error() string {
-return "No matching host found for the given url " + e.url + "."
-}
-type errInitClient struct {
-url string
-}
-func (e errInitClient) Error() string {
-return "Unable to initialize client for url " + e.url + "."
-}
-type errInvalidURL struct {
+type eInvalidURL struct {
URL string
}
-func (e errInvalidURL) Error() string {
-return "Invalid url " + e.URL
+func (e eInvalidURL) Error() string {
+return "Invalid URL " + e.URL
}
-type errInvalidSource errInvalidURL
-func (e errInvalidSource) Error() string {
+type eNoMatchingHost eInvalidURL
+func (e eNoMatchingHost) Error() string {
+return "No matching host found for the given URL " + e.URL + "."
+}
+type eInitClient eInvalidURL
+func (e eInitClient) Error() string {
+return "Unable to initialize client for URL " + e.URL + "."
+}
+type eInvalidSource eInvalidURL
+func (e eInvalidSource) Error() string {
return "Invalid source " + e.URL
}
-type errInvalidTarget errInvalidURL
-func (e errInvalidTarget) Error() string {
+type eInvalidTarget eInvalidURL
+func (e eInvalidTarget) Error() string {
return "Invalid target " + e.URL
}
-type errSourceNotRecursive errInvalidURL
-func (e errSourceNotRecursive) Error() string {
+type eSourceNotRecursive eInvalidURL
+func (e eSourceNotRecursive) Error() string {
return "Source " + e.URL + " is not recursive."
}
-type errSourceIsNotDir errInvalidURL
-func (e errSourceIsNotDir) Error() string {
+type eSourceIsNotDir eInvalidURL
+func (e eSourceIsNotDir) Error() string {
return "Source " + e.URL + " is not a folder."
}
-var errSourceListEmpty = errors.New("Source list is empty.")
+type eNotAnObject eInvalidURL
+func (e eNotAnObject) Error() string {
+return "" + e.URL + " is not an object."
+}
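Taken together, errors.go now distinguishes two kinds of errors: pre-wrapped sentinels (errDummy, errInvalidArgument, errSourceListEmpty) that callers only Trace(), and lightweight e* typed errors that are still wrapped with probe.NewError at the failure site. A short usage sketch, mirroring call sites elsewhere in this commit:

// Sentinel: already a *probe.Error, so just record a trace entry where it is raised.
if config == nil {
	return errInvalidArgument.Trace()
}
// Typed error: carries context (the URL) and is wrapped on demand.
if !isValidURL(url) {
	return nil, probe.NewError(eInvalidURL{URL: url})
}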

url.go
View File

@@ -54,7 +54,7 @@ func getCanonicalizedURL(arg string, aliases map[string]string) (urlStr string,
}
if _, err := client.Parse(urlStr); err != nil {
// Not a valid URL. Return error
-return "", probe.NewError(errInvalidURL{urlStr})
+return "", probe.NewError(eInvalidURL{urlStr})
}
return urlStr, nil
}

View File

@@ -48,8 +48,8 @@
"canonical": "github.com/minio/minio/pkg/probe",
"comment": "",
"local": "vendor/github.com/minio/minio/pkg/probe",
-"revision": "4d1f38d28cabcfa321e9d88acef67e16e7bafb2a",
-"revisionTime": "2015-08-22T18:37:56-07:00"
+"revision": "2f260adc69701f89a38fc8a27cde4b868dfe9ddf",
+"revisionTime": "2015-08-24T03:43:13-07:00"
},
{
"canonical": "github.com/minio/minio/pkg/quick",

View File

@@ -108,18 +108,19 @@ func (e *Error) trace(fields ...string) *Error {
}
// Untrace erases last trace entry.
-func (e *Error) Untrace() {
+func (e *Error) Untrace() *Error {
if e == nil {
-return
+return nil
}
e.lock.Lock()
defer e.lock.Unlock()
l := len(e.CallTrace)
if l == 0 {
-return
+return nil
}
e.CallTrace = e.CallTrace[:l-1]
+return e
}
// ToGoError returns original error message.
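Returning *Error rather than nothing is what makes the one-line sentinels in errors.go possible. A brief illustration, assuming Untrace() is intended to be chained directly onto NewError:

// NewError records a trace entry for its own call site; chaining Untrace() removes it and
// returns the same *Error, so the package-level sentinel starts with an empty call trace.
var errSourceListEmpty = probe.NewError(errors.New("Source argument list is empty.")).Untrace()
// Callers then record the real failure site:
copyURLsCh <- copyURLs{Error: errSourceListEmpty.Trace()}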

View File

@@ -17,7 +17,6 @@
package main
import (
-"errors"
"fmt"
"regexp"
"runtime"
@@ -92,7 +91,7 @@ func checkGolangRuntimeVersion() {
v1 := newVersion(getNormalizedGolangVersion())
v2 := newVersion(minGolangVersion)
if v1.LessThan(v2) {
-errorIf(probe.NewError(errors.New("")),
+errorIf(errDummy.Trace(),
"Old Golang runtime version "+v1.String()+" detected., mc requires minimum go1.5 or later.")
}
}

View File

@@ -58,12 +58,12 @@ func mainVersion(ctxx *cli.Context) {
Format string `json:"format"`
}
if globalJSONFlag {
-tB, err := json.Marshal(
+tB, e := json.Marshal(
struct {
Version Version `json:"version"`
}{Version: Version{t, "RFC3339Nano"}},
)
-fatalIf(probe.NewError(err), "Unable to construct version string.")
+fatalIf(probe.NewError(e), "Unable to construct version string.")
console.Println(string(tB))
return
}