From 6f7d04721a29146ff7f34d994b515b910e00d94c Mon Sep 17 00:00:00 2001 From: Calle Pettersson Date: Mon, 4 Sep 2017 11:35:54 +0100 Subject: [PATCH 1/8] Switch cmd/amtool to kingpin --- artifacts/generate_amtool_artifacts.go | 17 - cli/alert.go | 66 +- cli/check_config.go | 33 +- cli/config.go | 32 +- cli/format/format.go | 4 +- cli/root.go | 108 ++- cli/silence.go | 24 +- cli/silence_add.go | 102 +- cli/silence_expire.go | 29 +- cli/silence_import.go | 47 +- cli/silence_query.go | 83 +- cli/silence_update.go | 82 +- cli/utils.go | 26 +- {artifacts => cmd/amtool}/README.md | 10 +- vendor/github.com/alecthomas/kingpin/COPYING | 19 + .../github.com/alecthomas/kingpin/README.md | 762 +++++++++++++++ .../github.com/alecthomas/kingpin/actions.go | 45 + vendor/github.com/alecthomas/kingpin/app.go | 678 ++++++++++++++ vendor/github.com/alecthomas/kingpin/args.go | 60 ++ .../alecthomas/kingpin/camelcase.go | 90 ++ .../github.com/alecthomas/kingpin/clause.go | 331 +++++++ vendor/github.com/alecthomas/kingpin/cmd.go | 317 +++++++ .../alecthomas/kingpin/completions.go | 34 + vendor/github.com/alecthomas/kingpin/doc.go | 68 ++ vendor/github.com/alecthomas/kingpin/flags.go | 141 +++ .../github.com/alecthomas/kingpin/global.go | 96 ++ .../alecthomas/kingpin/guesswidth.go | 9 + .../alecthomas/kingpin/guesswidth_unix.go | 38 + .../alecthomas/kingpin/i18n_en_AU.go | 3 + .../github.com/alecthomas/kingpin/i18n_fr.go | 3 + .../alecthomas/kingpin/i18n_init.go | 82 ++ vendor/github.com/alecthomas/kingpin/model.go | 298 ++++++ .../github.com/alecthomas/kingpin/parser.go | 437 +++++++++ .../github.com/alecthomas/kingpin/resolver.go | 157 ++++ .../github.com/alecthomas/kingpin/struct.go | 196 ++++ .../alecthomas/kingpin/templates.go | 171 ++++ vendor/github.com/alecthomas/kingpin/usage.go | 279 ++++++ .../github.com/alecthomas/kingpin/values.go | 460 +++++++++ .../github.com/alecthomas/kingpin/values.json | 25 + .../alecthomas/kingpin/values_generated.go | 882 ++++++++++++++++++ vendor/vendor.json | 15 + 41 files changed, 5941 insertions(+), 418 deletions(-) delete mode 100644 artifacts/generate_amtool_artifacts.go rename {artifacts => cmd/amtool}/README.md (55%) create mode 100644 vendor/github.com/alecthomas/kingpin/COPYING create mode 100644 vendor/github.com/alecthomas/kingpin/README.md create mode 100644 vendor/github.com/alecthomas/kingpin/actions.go create mode 100644 vendor/github.com/alecthomas/kingpin/app.go create mode 100644 vendor/github.com/alecthomas/kingpin/args.go create mode 100644 vendor/github.com/alecthomas/kingpin/camelcase.go create mode 100644 vendor/github.com/alecthomas/kingpin/clause.go create mode 100644 vendor/github.com/alecthomas/kingpin/cmd.go create mode 100644 vendor/github.com/alecthomas/kingpin/completions.go create mode 100644 vendor/github.com/alecthomas/kingpin/doc.go create mode 100644 vendor/github.com/alecthomas/kingpin/flags.go create mode 100644 vendor/github.com/alecthomas/kingpin/global.go create mode 100644 vendor/github.com/alecthomas/kingpin/guesswidth.go create mode 100644 vendor/github.com/alecthomas/kingpin/guesswidth_unix.go create mode 100644 vendor/github.com/alecthomas/kingpin/i18n_en_AU.go create mode 100644 vendor/github.com/alecthomas/kingpin/i18n_fr.go create mode 100644 vendor/github.com/alecthomas/kingpin/i18n_init.go create mode 100644 vendor/github.com/alecthomas/kingpin/model.go create mode 100644 
vendor/github.com/alecthomas/kingpin/parser.go create mode 100644 vendor/github.com/alecthomas/kingpin/resolver.go create mode 100644 vendor/github.com/alecthomas/kingpin/struct.go create mode 100644 vendor/github.com/alecthomas/kingpin/templates.go create mode 100644 vendor/github.com/alecthomas/kingpin/usage.go create mode 100644 vendor/github.com/alecthomas/kingpin/values.go create mode 100644 vendor/github.com/alecthomas/kingpin/values.json create mode 100644 vendor/github.com/alecthomas/kingpin/values_generated.go diff --git a/artifacts/generate_amtool_artifacts.go b/artifacts/generate_amtool_artifacts.go deleted file mode 100644 index 2b518769bb..0000000000 --- a/artifacts/generate_amtool_artifacts.go +++ /dev/null @@ -1,17 +0,0 @@ -package main - -import ( - "github.com/spf13/cobra/doc" - - "github.com/prometheus/alertmanager/cli" -) - -func main() { - cli.RootCmd.GenBashCompletionFile("amtool_completion.sh") - header := &doc.GenManHeader{ - Title: "amtool", - Section: "1", - } - - doc.GenManTree(cli.RootCmd, header, ".") -} diff --git a/cli/alert.go b/cli/alert.go index 4a75f1f35a..033a5e868f 100644 --- a/cli/alert.go +++ b/cli/alert.go @@ -6,17 +6,15 @@ import ( "fmt" "net/http" "net/url" - "path" "strings" "time" + "github.com/alecthomas/kingpin" "github.com/prometheus/alertmanager/cli/format" "github.com/prometheus/alertmanager/dispatch" "github.com/prometheus/alertmanager/pkg/parse" "github.com/prometheus/alertmanager/types" "github.com/prometheus/common/model" - "github.com/spf13/cobra" - "github.com/spf13/viper" ) type alertmanagerAlertResponse struct { @@ -37,11 +35,8 @@ type alertBlock struct { Alerts []*dispatch.APIAlert `json:"alerts"` } -// alertCmd represents the alert command -var alertCmd = &cobra.Command{ - Use: "alert", - Short: "View and search through current alerts", - Long: `View and search through current alerts. +/* + View and search through current alerts. Amtool has a simplified prometheus query syntax, but contains robust support for bash variable expansions. The non-option section of arguments constructs a list @@ -63,35 +58,23 @@ var alertCmd = &cobra.Command{ As well as direct equality, regex matching is also supported. The '=~' syntax (similar to prometheus) is used to represent a regex match. Regex matching can be used in combination with a direct match. 
- `, - Run: CommandWrapper(queryAlerts), -} - -var alertQueryCmd = &cobra.Command{ - Use: "query", - Short: "View and search through current alerts", - Long: alertCmd.Long, - RunE: queryAlerts, -} +*/ +var ( + alertCmd = app.Command("alert", "View and search through current alerts") + alertQueryCmd = alertCmd.Command("query", "View and search through current alerts").Default() + expired = alertQueryCmd.Flag("expired", "Show expired alerts as well as active").Bool() + showSilenced = alertQueryCmd.Flag("silenced", "Show silenced alerts").Short('s').Bool() + alertQuery = alertQueryCmd.Arg("matcher-groups", "Query filter").Strings() +) func init() { - RootCmd.AddCommand(alertCmd) - alertCmd.AddCommand(alertQueryCmd) - alertQueryCmd.Flags().Bool("expired", false, "Show expired alerts as well as active") - alertQueryCmd.Flags().BoolP("silenced", "s", false, "Show silenced alerts") - viper.BindPFlag("expired", alertQueryCmd.Flags().Lookup("expired")) - viper.BindPFlag("silenced", alertQueryCmd.Flags().Lookup("silenced")) + alertQueryCmd.Action(queryAlerts) } func fetchAlerts(filter string) ([]*dispatch.APIAlert, error) { alertResponse := alertmanagerAlertResponse{} - u, err := GetAlertmanagerURL() - if err != nil { - return []*dispatch.APIAlert{}, err - } - - u.Path = path.Join(u.Path, "/api/v1/alerts/groups") + u := GetAlertmanagerURL("/api/v1/alerts/groups") u.RawQuery = "filter=" + url.QueryEscape(filter) res, err := http.Get(u.String()) @@ -123,23 +106,20 @@ func flattenAlertOverview(overview []*alertGroup) []*dispatch.APIAlert { return alerts } -func queryAlerts(cmd *cobra.Command, args []string) error { - expired := viper.GetBool("expired") - showSilenced := viper.GetBool("silenced") - +func queryAlerts(element *kingpin.ParseElement, ctx *kingpin.ParseContext) error { var filterString = "" - if len(args) == 1 { + if len(*alertQuery) == 1 { // If we only have one argument then it's possible that the user wants me to assume alertname= // Attempt to use the parser to pare the argument // If the parser fails then we likely don't have a (=|=~|!=|!~) so lets prepend `alertname=` to the front - _, err := parse.Matcher(args[0]) + _, err := parse.Matcher((*alertQuery)[0]) if err != nil { - filterString = fmt.Sprintf("{alertname=%s}", args[0]) + filterString = fmt.Sprintf("{alertname=%s}", (*alertQuery)[0]) } else { - filterString = fmt.Sprintf("{%s}", strings.Join(args, ",")) + filterString = fmt.Sprintf("{%s}", strings.Join(*alertQuery, ",")) } - } else if len(args) > 1 { - filterString = fmt.Sprintf("{%s}", strings.Join(args, ",")) + } else if len(*alertQuery) > 1 { + filterString = fmt.Sprintf("{%s}", strings.Join(*alertQuery, ",")) } fetchedAlerts, err := fetchAlerts(filterString) @@ -150,13 +130,13 @@ func queryAlerts(cmd *cobra.Command, args []string) error { displayAlerts := []*dispatch.APIAlert{} for _, alert := range fetchedAlerts { // If we are only returning current alerts and this one has already expired skip it - if !expired { + if !*expired { if !alert.EndsAt.IsZero() && alert.EndsAt.Before(time.Now()) { continue } } - if !showSilenced { + if !*showSilenced { // If any silence mutes this alert don't show it if alert.Status.State == types.AlertStateSuppressed && len(alert.Status.SilencedBy) > 0 { continue @@ -166,7 +146,7 @@ func queryAlerts(cmd *cobra.Command, args []string) error { displayAlerts = append(displayAlerts, alert) } - formatter, found := format.Formatters[viper.GetString("output")] + formatter, found := format.Formatters[*output] if !found { return errors.New("unknown 
output formatter") } diff --git a/cli/check_config.go b/cli/check_config.go index 732e5df0a7..8f44d4330f 100644 --- a/cli/check_config.go +++ b/cli/check_config.go @@ -3,32 +3,31 @@ package cli import ( "fmt" + "github.com/alecthomas/kingpin" "github.com/prometheus/alertmanager/config" "github.com/prometheus/alertmanager/template" - "github.com/spf13/cobra" ) // alertCmd represents the alert command -var checkConfigCmd = &cobra.Command{ - Use: "check-config file1 [file2] ...", - Args: cobra.MinimumNArgs(1), - Short: "Validate alertmanager config files", - Long: `Validate alertmanager config files - -Will validate the syntax and schema for alertmanager config file -and associated templates. Non existing templates will not trigger -errors`, - RunE: checkConfig, -} +var ( + checkConfigCmd = app.Command("check-config", "Validate alertmanager config files") + checkFiles = checkConfigCmd.Arg("check-files", "Files to be validated").ExistingFiles() +) func init() { - RootCmd.AddCommand(checkConfigCmd) - checkConfigCmd.Flags() + checkConfigCmd.Action(checkConfig) } -func checkConfig(cmd *cobra.Command, args []string) error { - cmd.SilenceUsage = true - return CheckConfig(args) +/* +Validate alertmanager config files + +Will validate the syntax and schema for alertmanager config file +and associated templates. Non existing templates will not trigger +errors +*/ + +func checkConfig(element *kingpin.ParseElement, ctx *kingpin.ParseContext) error { + return CheckConfig(*checkFiles) } func CheckConfig(args []string) error { diff --git a/cli/config.go b/cli/config.go index 271b2ca8e3..7114081a5f 100644 --- a/cli/config.go +++ b/cli/config.go @@ -5,13 +5,11 @@ import ( "errors" "fmt" "net/http" - "path" "time" + "github.com/alecthomas/kingpin" "github.com/prometheus/alertmanager/cli/format" "github.com/prometheus/alertmanager/config" - "github.com/spf13/cobra" - "github.com/spf13/viper" ) // Config is the response type of alertmanager config endpoint @@ -43,31 +41,13 @@ type alertmanagerStatusResponse struct { Error string `json:"error,omitempty"` } -// alertCmd represents the alert command -var configCmd = &cobra.Command{ - Use: "config", - Short: "View the running config", - Long: `View current config - -The amount of output is controlled by the output selection flag: - - Simple: Print just the running config - - Extended: Print the running config as well as uptime and all version info - - Json: Print entire config object as json`, - RunE: queryConfig, -} - -func init() { - RootCmd.AddCommand(configCmd) -} +// configCmd represents the config command +var configCmd = app.Command("config", "View the running config").Action(queryConfig) func fetchConfig() (Config, error) { configResponse := alertmanagerStatusResponse{} - u, err := GetAlertmanagerURL() - if err != nil { - return Config{}, err - } - u.Path = path.Join(u.Path, "/api/v1/status") + u := GetAlertmanagerURL("/api/v1/status") res, err := http.Get(u.String()) if err != nil { return Config{}, err @@ -87,13 +67,13 @@ func fetchConfig() (Config, error) { return configResponse.Data, nil } -func queryConfig(cmd *cobra.Command, args []string) error { +func queryConfig(element *kingpin.ParseElement, ctx *kingpin.ParseContext) error { config, err := fetchConfig() if err != nil { return err } - formatter, found := format.Formatters[viper.GetString("output")] + formatter, found := format.Formatters[*output] if !found { return errors.New("unknown output formatter") } diff --git a/cli/format/format.go b/cli/format/format.go index 
29b40fd7d0..b180f66e22 100644 --- a/cli/format/format.go +++ b/cli/format/format.go @@ -7,7 +7,6 @@ import ( "github.com/prometheus/alertmanager/config" "github.com/prometheus/alertmanager/dispatch" "github.com/prometheus/alertmanager/types" - "github.com/spf13/viper" ) const DefaultDateFormat = "2006-01-02 15:04:05 MST" @@ -46,6 +45,7 @@ type Formatter interface { var Formatters = map[string]Formatter{} func FormatDate(input time.Time) string { - dateformat := viper.GetString("date.format") + // FIX-BEFORE-MERGE + dateformat := DefaultDateFormat //viper.GetString("date.format") return input.Format(dateformat) } diff --git a/cli/root.go b/cli/root.go index 11b3ef18ff..fa276e679f 100644 --- a/cli/root.go +++ b/cli/root.go @@ -1,19 +1,64 @@ package cli import ( - "fmt" + "io/ioutil" "os" - "github.com/prometheus/alertmanager/cli/format" - "github.com/spf13/cobra" - "github.com/spf13/viper" + "github.com/alecthomas/kingpin" + "github.com/prometheus/common/version" + "gopkg.in/yaml.v2" ) -// RootCmd represents the base command when called without any subcommands -var RootCmd = &cobra.Command{ - Use: "amtool", - Short: "Alertmanager CLI", - Long: `View and modify the current Alertmanager state. +var ( + app = kingpin.New("amtool", "Alertmanager CLI").DefaultEnvars() + verbose = app.Flag("verbose", "Verbose running information").Short('v').Bool() + alertmanagerUrl = app.Flag("alertmanager.url", "Alertmanager to talk to").Required().URL() + output = app.Flag("output", "Output formatter (simple, extended, json)").Default("simple").Enum("simple", "extended", "json") +) + +type amtoolConfigResolver struct { + configData []map[string]string +} + +func newConfigResolver() amtoolConfigResolver { + files := []string{ + os.ExpandEnv("$HOME/.config/amtool/config.yml"), + "/etc/amtool/config.yml", + } + + resolver := amtoolConfigResolver{ + configData: make([]map[string]string, 0), + } + for _, f := range files { + b, err := ioutil.ReadFile(f) + if err != nil { + if os.IsNotExist(err) { + continue + } + panic(err) + } + var config map[string]string + err = yaml.Unmarshal(b, &config) + if err != nil { + panic(err) + } + resolver.configData = append(resolver.configData, config) + } + + return resolver +} + +func (r amtoolConfigResolver) Resolve(key string, context *kingpin.ParseContext) ([]string, error) { + for _, c := range r.configData { + if v, ok := c[key]; ok { + return []string{v}, nil + } + } + return nil, nil +} + +/* +`View and modify the current Alertmanager state. [Config File] @@ -36,48 +81,17 @@ The accepted config options are as follows: output Set a default output type. Options are (simple, extended, json) - `, -} +*/ // Execute adds all child commands to the root command sets flags appropriately. // This is called by main.main(). It only needs to happen once to the rootCmd. 
func Execute() { - if err := RootCmd.Execute(); err != nil { - os.Exit(1) - } -} - -func init() { - cobra.OnInitialize(initConfig) - - RootCmd.PersistentFlags().String("config", "", "config file (default is $HOME/.config/amtool/config.yml)") - viper.BindPFlag("config", RootCmd.PersistentFlags().Lookup("config")) - RootCmd.PersistentFlags().String("alertmanager.url", "", "Alertmanager to talk to") - viper.BindPFlag("alertmanager.url", RootCmd.PersistentFlags().Lookup("alertmanager.url")) - RootCmd.PersistentFlags().StringP("output", "o", "simple", "Output formatter (simple, extended, json)") - viper.BindPFlag("output", RootCmd.PersistentFlags().Lookup("output")) - RootCmd.PersistentFlags().BoolP("verbose", "v", false, "Verbose running information") - viper.BindPFlag("verbose", RootCmd.PersistentFlags().Lookup("verbose")) - viper.SetDefault("date.format", format.DefaultDateFormat) -} + app.Version(version.Print("amtool")) + app.GetFlag("help").Short('h') -// initConfig reads in config file and ENV variables if set. -func initConfig() { - viper.SetConfigName("config") // name of config file (without extension) - viper.AddConfigPath("/etc/amtool") - viper.AddConfigPath("$HOME/.config/amtool") - viper.SetEnvPrefix("AMTOOL") - viper.AutomaticEnv() // read in environment variables that match - - // If a config file is found, read it in. - cfgFile := viper.GetString("config") - if cfgFile != "" { // enable ability to specify config file via flag - viper.SetConfigFile(cfgFile) - } - err := viper.ReadInConfig() - if err == nil { - if viper.GetBool("verbose") { - fmt.Fprintln(os.Stderr, "Using config file:", viper.ConfigFileUsed()) - } + app.Resolver(newConfigResolver()) + _, err := app.Parse(os.Args[1:]) + if err != nil { + kingpin.Fatalf("%v\n", err) } } diff --git a/cli/silence.go b/cli/silence.go index c0b16e457f..7099d539a5 100644 --- a/cli/silence.go +++ b/cli/silence.go @@ -1,9 +1,6 @@ package cli import ( - "github.com/spf13/cobra" - "github.com/spf13/viper" - "github.com/prometheus/alertmanager/types" ) @@ -17,20 +14,7 @@ type alertmanagerSilenceResponse struct { } // silenceCmd represents the silence command -var silenceCmd = &cobra.Command{ - Use: "silence", - Short: "Manage silences", - Long: `Add, expire or view silences. For more information and additional flags see query help`, - Run: CommandWrapper(query), -} - -func init() { - RootCmd.AddCommand(silenceCmd) - silenceCmd.PersistentFlags().BoolP("quiet", "q", false, "Only show silence ids") - viper.BindPFlag("quiet", silenceCmd.PersistentFlags().Lookup("quiet")) - silenceCmd.AddCommand(addCmd) - silenceCmd.AddCommand(expireCmd) - silenceCmd.AddCommand(queryCmd) - silenceCmd.AddCommand(importCmd) - silenceCmd.AddCommand(updateCmd) -} +var ( + silenceCmd = app.Command("silence", "Add, expire or view silences. 
For more information and additional flags see query help") + silenceQuiet = silenceCmd.Flag("quiet", "Only show silence ids").Short('q').Bool() +) diff --git a/cli/silence_add.go b/cli/silence_add.go index 6d784979ba..3a64948146 100644 --- a/cli/silence_add.go +++ b/cli/silence_add.go @@ -7,14 +7,10 @@ import ( "fmt" "net/http" "os/user" - "path" "time" + "github.com/alecthomas/kingpin" "github.com/prometheus/alertmanager/types" - "github.com/prometheus/common/model" - "github.com/spf13/cobra" - flag "github.com/spf13/pflag" - "github.com/spf13/viper" ) type addResponse struct { @@ -26,11 +22,30 @@ type addResponse struct { Error string `json:"error,omitempty"` } -var addFlags *flag.FlagSet -var addCmd = &cobra.Command{ - Use: "add", - Short: "Add silence", - Long: `Add a new alertmanager silence +func username() string { + user, err := user.Current() + if err != nil { + return "" + } + return user.Username +} + +var ( + addCmd = silenceCmd.Command("add", "Add a new alertmanager silence") + author = addCmd.Flag("author", "Username for CreatedBy field").Short('a').Default(username()).String() + requireComment = addCmd.Flag("require-comment", "Require comment to be set").Hidden().Default("true").Bool() + expires = addCmd.Flag("expires", "Duration of silence (100h)").Short('e').Default("1h").Duration() + expireOn = addCmd.Flag("expire-on", "Expire at a certain time (Overwrites expires) RFC3339 format 2006-01-02T15:04:05Z07:00").String() + comment = addCmd.Flag("comment", "A comment to help describe the silence").Short('c').String() + addArgs = addCmd.Arg("matcher-groups", "Query filter").Strings() +) + +func init() { + addCmd.Action(add) +} + +/* +Add a new alertmanager silence Amtool uses a simplified prometheus syntax to represent silences. The non-option section of arguments constructs a list of "Matcher Groups" @@ -52,35 +67,12 @@ var addCmd = &cobra.Command{ As well as direct equality, regex matching is also supported. The '=~' syntax (similar to prometheus) is used to represent a regex match. Regex matching can be used in combination with a direct match. 
- `, - Run: CommandWrapper(add), -} - -func init() { - var username string +*/ - user, err := user.Current() - if err != nil { - fmt.Printf("failed to get the current user, specify one with --author: %v\n", err) - } else { - username = user.Username - } - - addCmd.Flags().StringP("author", "a", username, "Username for CreatedBy field") - addCmd.Flags().StringP("expires", "e", "1h", "Duration of silence (100h)") - addCmd.Flags().String("expire-on", "", "Expire at a certain time (Overwrites expires) RFC3339 format 2006-01-02T15:04:05Z07:00") - addCmd.Flags().StringP("comment", "c", "", "A comment to help describe the silence") - viper.BindPFlag("author", addCmd.Flags().Lookup("author")) - viper.BindPFlag("expires", addCmd.Flags().Lookup("expires")) - viper.BindPFlag("comment", addCmd.Flags().Lookup("comment")) - viper.SetDefault("comment_required", false) - addFlags = addCmd.Flags() -} - -func add(cmd *cobra.Command, args []string) error { +func add(element *kingpin.ParseElement, ctx *kingpin.ParseContext) error { var err error - matchers, err := parseMatchers(args) + matchers, err := parseMatchers(*addArgs) if err != nil { return err } @@ -89,35 +81,17 @@ func add(cmd *cobra.Command, args []string) error { return fmt.Errorf("no matchers specified") } - expireOn, err := addFlags.GetString("expire-on") - if err != nil { - return err - } - - expires := viper.GetString("expires") var endsAt time.Time - - if expireOn != "" { - endsAt, err = time.Parse(time.RFC3339, expireOn) + if *expireOn != "" { + endsAt, err = time.Parse(time.RFC3339, *expireOn) if err != nil { return err } } else { - duration, err := model.ParseDuration(expires) - if err != nil { - return err - } - if duration == 0 { - return fmt.Errorf("silence duration must be greater than 0") - } - endsAt = time.Now().UTC().Add(time.Duration(duration)) + endsAt = time.Now().UTC().Add(*expires) } - author := viper.GetString("author") - comment := viper.GetString("comment") - commentRequired := viper.GetBool("comment_required") - - if commentRequired && comment == "" { + if *requireComment && *comment == "" { return errors.New("comment required by config") } @@ -130,8 +104,8 @@ func add(cmd *cobra.Command, args []string) error { Matchers: typeMatchers, StartsAt: time.Now().UTC(), EndsAt: endsAt, - CreatedBy: author, - Comment: comment, + CreatedBy: *author, + Comment: *comment, } silenceId, err := addSilence(&silence) @@ -144,14 +118,10 @@ func add(cmd *cobra.Command, args []string) error { } func addSilence(silence *types.Silence) (string, error) { - u, err := GetAlertmanagerURL() - if err != nil { - return "", err - } - u.Path = path.Join(u.Path, "/api/v1/silences") + u := GetAlertmanagerURL("/api/v1/silences") buf := bytes.NewBuffer([]byte{}) - err = json.NewEncoder(buf).Encode(silence) + err := json.NewEncoder(buf).Encode(silence) if err != nil { return "", err } diff --git a/cli/silence_expire.go b/cli/silence_expire.go index f00163ad5c..71ff90f173 100644 --- a/cli/silence_expire.go +++ b/cli/silence_expire.go @@ -6,29 +6,26 @@ import ( "net/http" "path" - "github.com/spf13/cobra" + "github.com/alecthomas/kingpin" ) -var expireCmd = &cobra.Command{ - Use: "expire", - Short: "expire silence", - Long: `expire an alertmanager silence`, - Run: CommandWrapper(expire), -} +var ( + expireCmd = silenceCmd.Command("expire", "expire an alertmanager silence") + expireIds = expireCmd.Arg("silence-ids", "Ids of silences to expire").Strings() +) -func expire(cmd *cobra.Command, args []string) error { - u, err := GetAlertmanagerURL() - if err 
!= nil { - return err - } - basePath := path.Join(u.Path, "/api/v1/silence") +func init() { + expireCmd.Action(expire) +} - if len(args) < 1 { +func expire(element *kingpin.ParseElement, ctx *kingpin.ParseContext) error { + if len(*expireIds) < 1 { return errors.New("no silence IDs specified") } - for _, arg := range args { - u.Path = path.Join(basePath, arg) + basePath := "/api/v1/silence" + for _, id := range *expireIds { + u := GetAlertmanagerURL(path.Join(basePath, id)) req, err := http.NewRequest("DELETE", u.String(), nil) res, err := http.DefaultClient.Do(req) if err != nil { diff --git a/cli/silence_import.go b/cli/silence_import.go index 81e3a43733..e0f66ce12c 100644 --- a/cli/silence_import.go +++ b/cli/silence_import.go @@ -4,18 +4,14 @@ import ( "encoding/json" "fmt" "os" + "sync" + "github.com/alecthomas/kingpin" "github.com/pkg/errors" "github.com/prometheus/alertmanager/types" - "github.com/spf13/cobra" - flag "github.com/spf13/pflag" - "sync" ) -var importFlags *flag.FlagSet -var importCmd = &cobra.Command{ - Use: "import [JSON file]", - Short: "Import silences", +/* Long: `Import alertmanager silences from JSON file or stdin This command can be used to bulk import silences from a JSON file @@ -25,15 +21,17 @@ var importCmd = &cobra.Command{ amtool silence import foo.json JSON data can also come from stdin if no param is specified. - `, - Args: cobra.MaximumNArgs(1), - Run: CommandWrapper(bulkImport), -} +*/ + +var ( + importCmd = silenceCmd.Command("import", "Import silences") + force = importCmd.Flag("force", "Force adding new silences even if it already exists").Short('f').Bool() + workers = importCmd.Flag("worker", "Number of concurrent workers to use for import").Short('w').Default("8").Int() + importFile = importCmd.Arg("input-file", "JSON file with silences").ExistingFile() +) func init() { - importCmd.Flags().BoolP("force", "f", false, "Force adding new silences even if it already exists") - importCmd.Flags().IntP("worker", "w", 8, "Number of concurrent workers to use for import") - importFlags = importCmd.Flags() + importCmd.Action(bulkImport) } func addSilenceWorker(silencec <-chan *types.Silence, errc chan<- error) { @@ -55,20 +53,11 @@ func addSilenceWorker(silencec <-chan *types.Silence, errc chan<- error) { } } -func bulkImport(cmd *cobra.Command, args []string) error { - force, err := importFlags.GetBool("force") - if err != nil { - return err - } - - workers, err := importFlags.GetInt("worker") - if err != nil { - return err - } - +func bulkImport(element *kingpin.ParseElement, ctx *kingpin.ParseContext) error { input := os.Stdin - if len(args) == 1 { - input, err = os.Open(args[0]) + var err error + if *importFile != "" { + input, err = os.Open(*importFile) if err != nil { return err } @@ -85,7 +74,7 @@ func bulkImport(cmd *cobra.Command, args []string) error { silencec := make(chan *types.Silence, 100) errc := make(chan error, 100) var wg sync.WaitGroup - for w := 0; w < workers; w++ { + for w := 0; w < *workers; w++ { go func() { wg.Add(1) addSilenceWorker(silencec, errc) @@ -110,7 +99,7 @@ func bulkImport(cmd *cobra.Command, args []string) error { return errors.Wrap(err, "couldn't unmarshal input data, is it JSON?") } - if force { + if *force { // reset the silence ID so Alertmanager will always create new silence s.ID = "" } diff --git a/cli/silence_query.go b/cli/silence_query.go index d82df4066c..b52a39e303 100644 --- a/cli/silence_query.go +++ b/cli/silence_query.go @@ -6,24 +6,28 @@ import ( "fmt" "net/http" "net/url" - "path" 
"strings" "time" + "github.com/alecthomas/kingpin" "github.com/prometheus/alertmanager/cli/format" - "github.com/prometheus/common/model" "github.com/prometheus/alertmanager/pkg/parse" "github.com/prometheus/alertmanager/types" - "github.com/spf13/cobra" - flag "github.com/spf13/pflag" - "github.com/spf13/viper" ) -var queryFlags *flag.FlagSet -var queryCmd = &cobra.Command{ - Use: "query", - Short: "Query silences", - Long: `Query Alertmanager silences. +var ( + queryCmd = silenceCmd.Command("query", "Query Alertmanager silences.").Default() + queryExpired = queryCmd.Flag("expired", "Show expired silences as well as active").Bool() + silenceQuery = queryCmd.Arg("matcher-groups", "Query filter").Strings() + queryWithin = queryCmd.Flag("within", "Show silences that will expire within a duration").Duration() +) + +func init() { + queryCmd.Action(query) +} + +/* + Query Alertmanager silences. Amtool has a simplified prometheus query syntax, but contains robust support for bash variable expansions. The non-option section of arguments constructs a list @@ -55,25 +59,12 @@ var queryCmd = &cobra.Command{ gives all the silences due to expire within the next 8 hours. This syntax can also be combined with the label based filtering above for more flexibility. - `, - Run: CommandWrapper(query), -} - -func init() { - queryCmd.Flags().Bool("expired", false, "Show expired silences as well as active") - queryCmd.Flags().String("within", "", "Show silences that will expire within a duration") - queryFlags = queryCmd.Flags() -} +*/ func fetchSilences(filter string) ([]types.Silence, error) { silenceResponse := alertmanagerSilenceResponse{} - u, err := GetAlertmanagerURL() - if err != nil { - return []types.Silence{}, err - } - - u.Path = path.Join(u.Path, "/api/v1/silences") + u := GetAlertmanagerURL("/api/v1/silences") u.RawQuery = "filter=" + url.QueryEscape(filter) res, err := http.Get(u.String()) @@ -95,40 +86,20 @@ func fetchSilences(filter string) ([]types.Silence, error) { return silenceResponse.Data, nil } -func query(cmd *cobra.Command, args []string) error { - expired, err := queryFlags.GetBool("expired") - if err != nil { - return err - } - - within, err := queryFlags.GetString("within") - if err != nil { - return err - } - - var duration model.Duration - if within != "" { - duration, err = model.ParseDuration(within) - if err != nil { - return err - } - } - - quiet := viper.GetBool("quiet") - +func query(element *kingpin.ParseElement, ctx *kingpin.ParseContext) error { var filterString = "" - if len(args) == 1 { + if len(*silenceQuery) == 1 { // If we only have one argument then it's possible that the user wants me to assume alertname= // Attempt to use the parser to pare the argument // If the parser fails then we likely don't have a (=|=~|!=|!~) so lets prepend `alertname=` to the front - _, err := parse.Matcher(args[0]) + _, err := parse.Matcher((*silenceQuery)[0]) if err != nil { - filterString = fmt.Sprintf("{alertname=%s}", args[0]) + filterString = fmt.Sprintf("{alertname=%s}", (*silenceQuery)[0]) } else { - filterString = fmt.Sprintf("{%s}", strings.Join(args, ",")) + filterString = fmt.Sprintf("{%s}", strings.Join(*silenceQuery, ",")) } - } else if len(args) > 1 { - filterString = fmt.Sprintf("{%s}", strings.Join(args, ",")) + } else if len(*silenceQuery) > 1 { + filterString = fmt.Sprintf("{%s}", strings.Join(*silenceQuery, ",")) } fetchedSilences, err := fetchSilences(filterString) @@ -139,23 +110,23 @@ func query(cmd *cobra.Command, args []string) error { 
displaySilences := []types.Silence{} for _, silence := range fetchedSilences { // If we are only returning current silences and this one has already expired skip it - if !expired && silence.EndsAt.Before(time.Now()) { + if !*queryExpired && silence.EndsAt.Before(time.Now()) { continue } - if int64(duration) > 0 && silence.EndsAt.After(time.Now().UTC().Add(time.Duration(duration))) { + if int64(*queryWithin) > 0 && silence.EndsAt.After(time.Now().UTC().Add(*queryWithin)) { continue } displaySilences = append(displaySilences, silence) } - if quiet { + if *silenceQuiet { for _, silence := range displaySilences { fmt.Println(silence.ID) } } else { - formatter, found := format.Formatters[viper.GetString("output")] + formatter, found := format.Formatters[*output] if !found { return errors.New("unknown output formatter") } diff --git a/cli/silence_update.go b/cli/silence_update.go index 46c765a644..72e6130477 100644 --- a/cli/silence_update.go +++ b/cli/silence_update.go @@ -9,11 +9,9 @@ import ( "path" "time" + "github.com/alecthomas/kingpin" "github.com/prometheus/alertmanager/cli/format" "github.com/prometheus/alertmanager/types" - "github.com/spf13/cobra" - flag "github.com/spf13/pflag" - "github.com/spf13/viper" ) type getResponse struct { @@ -23,36 +21,31 @@ type getResponse struct { Error string `json:"error,omitempty"` } -var updateFlags *flag.FlagSet -var updateCmd = &cobra.Command{ - Use: "update ...", - Aliases: []string{"extend"}, - Args: cobra.MinimumNArgs(1), - Short: "Update silences", - Long: `Extend or update existing silence in Alertmanager.`, - Run: CommandWrapper(update), -} +var ( + updateCmd = silenceCmd.Command("update", "Update silences") + updateExpires = updateCmd.Flag("expires", "Duration of silence").Short('e').Duration() + updateExpiresOn = updateCmd.Flag("expire-on", "Expire at a certain time (Overwrites expires) RFC3339 format 2006-01-02T15:04:05Z07:00").Time(time.RFC3339) + updateComment = updateCmd.Flag("comment", "A comment to help describe the silence").Short('c').String() + updateIds = updateCmd.Arg("update-ids", "Silence IDs to update").Strings() +) + +/* +Extend or update existing silence in Alertmanager. 
+*/ func init() { - updateCmd.Flags().StringP("expires", "e", "", "Duration of silence (100h)") - updateCmd.Flags().String("expire-on", "", "Expire at a certain time (Overwrites expires) RFC3339 format 2006-01-02T15:04:05Z07:00") - updateCmd.Flags().StringP("comment", "c", "", "A comment to help describe the silence") - updateFlags = updateCmd.Flags() + updateCmd.Action(update) } -func update(cmd *cobra.Command, args []string) error { - if len(args) < 1 { +func update(element *kingpin.ParseElement, ctx *kingpin.ParseContext) error { + if len(*updateIds) < 1 { return fmt.Errorf("no silence IDs specified") } - alertmanagerUrl, err := GetAlertmanagerURL() - if err != nil { - return err - } - + alertmanagerUrl := GetAlertmanagerURL("/api/v1/silence") var updatedSilences []types.Silence - for _, silenceId := range args { - silence, err := getSilenceById(silenceId, *alertmanagerUrl) + for _, silenceId := range *updateIds { + silence, err := getSilenceById(silenceId, alertmanagerUrl) if err != nil { return err } @@ -63,13 +56,12 @@ func update(cmd *cobra.Command, args []string) error { updatedSilences = append(updatedSilences, *silence) } - quiet := viper.GetBool("quiet") - if quiet { + if *silenceQuiet { for _, silence := range updatedSilences { fmt.Println(silence.ID) } } else { - formatter, found := format.Formatters[viper.GetString("output")] + formatter, found := format.Formatters[*output] if !found { return fmt.Errorf("unknown output formatter") } @@ -80,7 +72,7 @@ func update(cmd *cobra.Command, args []string) error { // This takes an url.URL and not a pointer as we will modify it for our API call. func getSilenceById(silenceId string, baseUrl url.URL) (*types.Silence, error) { - baseUrl.Path = path.Join(baseUrl.Path, "/api/v1/silence", silenceId) + baseUrl.Path = path.Join(baseUrl.Path, silenceId) res, err := http.Get(baseUrl.String()) if err != nil { return nil, err @@ -105,37 +97,17 @@ func getSilenceById(silenceId string, baseUrl url.URL) (*types.Silence, error) { } func updateSilence(silence *types.Silence) (*types.Silence, error) { - if updateFlags.Changed("expires") { - expires, err := updateFlags.GetString("expires") - if err != nil { - return nil, err - } - duration, err := time.ParseDuration(expires) - if err != nil { - return nil, err - } - silence.EndsAt = time.Now().UTC().Add(duration) + if *updateExpires != 0 { + silence.EndsAt = time.Now().UTC().Add(*updateExpires) } // expire-on will override expires value if both are specified - if updateFlags.Changed("expire-on") { - expireOn, err := updateFlags.GetString("expire-on") - if err != nil { - return nil, err - } - endsAt, err := time.Parse(time.RFC3339, expireOn) - if err != nil { - return nil, err - } - silence.EndsAt = endsAt + if !(*updateExpiresOn).IsZero() { + silence.EndsAt = *updateExpiresOn } - if updateFlags.Changed("comment") { - comment, err := updateFlags.GetString("comment") - if err != nil { - return nil, err - } - silence.Comment = comment + if *comment != "" { + silence.Comment = *comment } // addSilence can also be used to update an existing silence diff --git a/cli/utils.go b/cli/utils.go index fa97e39b9e..256c6770a9 100644 --- a/cli/utils.go +++ b/cli/utils.go @@ -1,13 +1,9 @@ package cli import ( - "errors" "fmt" "net/url" - "os" - - "github.com/spf13/cobra" - "github.com/spf13/viper" + "path" "github.com/prometheus/alertmanager/pkg/parse" "github.com/prometheus/alertmanager/types" @@ -30,12 +26,12 @@ func (s ByAlphabetical) Less(i, j int) bool { return false } -func GetAlertmanagerURL() 
(*url.URL, error) { - u, err := url.ParseRequestURI(viper.GetString("alertmanager.url")) - if err != nil { - return nil, errors.New("invalid alertmanager url") +func GetAlertmanagerURL(p string) url.URL { + return url.URL{ + Scheme: (*alertmanagerUrl).Scheme, + Host: (*alertmanagerUrl).Host, + Path: path.Join((*alertmanagerUrl).Path, p), } - return u, nil } // Parse a list of labels (cli arguments) @@ -86,13 +82,3 @@ func TypeMatcher(matcher labels.Matcher) (types.Matcher, error) { } return *typeMatcher, nil } - -func CommandWrapper(command func(*cobra.Command, []string) error) func(*cobra.Command, []string) { - return func(cmd *cobra.Command, args []string) { - err := command(cmd, args) - if err != nil { - fmt.Printf("Error: %s\n", err) - os.Exit(1) - } - } -} diff --git a/artifacts/README.md b/cmd/amtool/README.md similarity index 55% rename from artifacts/README.md rename to cmd/amtool/README.md index af868fc181..4e1469f125 100644 --- a/artifacts/README.md +++ b/cmd/amtool/README.md @@ -1,18 +1,20 @@ # Generating amtool artifacts -Amtool comes with the option to create a number of ease-of-use artifacts. +Amtool comes with the option to create a number of ease-of-use artifacts that can be created. - go run generate_amtool_artifacts.go +## Shell completion -## Bash completion +A bash completion script can be generated by calling `amtool --completion-script-bash`. The bash completion file can be added to `/etc/bash_completion.d/`. ## Man pages +A man page can be generated by calling `amtool --help-man`. + Man pages can be added to the man directory of your choice - cp artifacts/*.1 /usr/local/share/man/man1/ + amtool --help-man > /usr/local/share/man/man1/amtool.1 sudo mandb Then you should be able to view the man pages as expected. diff --git a/vendor/github.com/alecthomas/kingpin/COPYING b/vendor/github.com/alecthomas/kingpin/COPYING new file mode 100644 index 0000000000..2993ec085d --- /dev/null +++ b/vendor/github.com/alecthomas/kingpin/COPYING @@ -0,0 +1,19 @@ +Copyright (C) 2014 Alec Thomas + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/vendor/github.com/alecthomas/kingpin/README.md b/vendor/github.com/alecthomas/kingpin/README.md new file mode 100644 index 0000000000..fb00fac297 --- /dev/null +++ b/vendor/github.com/alecthomas/kingpin/README.md @@ -0,0 +1,762 @@ +# Kingpin - A Go (golang) command line and flag parser [![](https://godoc.org/github.com/alecthomas/kingpin?status.svg)](http://godoc.org/github.com/alecthomas/kingpin) [![Build Status](https://travis-ci.org/alecthomas/kingpin.png)](https://travis-ci.org/alecthomas/kingpin) [![Gitter chat](https://badges.gitter.im/alecthomas.png)](https://gitter.im/alecthomas/Lobby) + + + +- [Overview](#overview) +- [Features](#features) +- [User-visible changes between v2 and v3](#user-visible-changes-between-v2-and-v3) + - [Some flag types have been removed.](#some-flag-types-have-been-removed) + - [Semantics around boolean flags have changed.](#semantics-around-boolean-flags-have-changed) +- [User-visible changes between v1 and v2](#user-visible-changes-between-v1-and-v2) + - [Flags can be used at any point after their definition.](#flags-can-be-used-at-any-point-after-their-definition) + - [Short flags can be combined with their parameters](#short-flags-can-be-combined-with-their-parameters) +- [API changes between v1 and v2](#api-changes-between-v1-and-v2) +- [Versions](#versions) + - [V2 is the current stable version](#v2-is-the-current-stable-version) + - [V1 is the OLD stable version](#v1-is-the-old-stable-version) +- [Change History](#change-history) +- [Examples](#examples) + - [Simple Example](#simple-example) + - [Complex Example](#complex-example) +- [Reference Documentation](#reference-documentation) + - [Displaying errors and usage information](#displaying-errors-and-usage-information) + - [Sub-commands](#sub-commands) + - [Custom Parsers](#custom-parsers) + - [Repeatable flags](#repeatable-flags) + - [Boolean values](#boolean-values) + - [Default Values](#default-values) + - [Place-holders in Help](#place-holders-in-help) + - [Consuming all remaining arguments](#consuming-all-remaining-arguments) + - [Configuration files and other external sources of flag/argument values](#configuration-files-and-other-external-sources-of-flagargument-values) + - [Struct tag interpolation](#struct-tag-interpolation) + - [Bash/ZSH Shell Completion](#bashzsh-shell-completion) + - [Additional API](#additional-api) + - [Supporting -h for help](#supporting--h-for-help) + - [Custom help](#custom-help) + - [Adding a new help command](#adding-a-new-help-command) + - [Default help template](#default-help-template) + - [Compact help template](#compact-help-template) + + + +## Overview + +Kingpin is a [fluent-style](http://en.wikipedia.org/wiki/Fluent_interface), +type-safe command-line parser. It supports flags, nested commands, and +positional arguments. + +Install it with: + + $ go get gopkg.in/alecthomas/kingpin.v2 + +It looks like this: + +```go +var ( + verbose = kingpin.Flag("verbose", "Verbose mode.").Short('v').Bool() + name = kingpin.Arg("name", "Name of user.").Required().String() +) + +func main() { + kingpin.Parse() + fmt.Printf("%v, %s\n", *verbose, *name) +} +``` + +More [examples](https://github.com/alecthomas/kingpin/tree/master/_examples) are available. + +Second to parsing, providing the user with useful help is probably the most +important thing a command-line parser does. Kingpin tries to provide detailed +contextual help if `--help` is encountered at any point in the command line +(excluding after `--`). 
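For context on how this patch applies the library: the amtool changes above use the same fluent style, but register explicit `Action` callbacks rather than calling the package-level `kingpin.Parse()` shown in the snippet. Below is a condensed, hypothetical sketch of that wiring (command, default subcommand, flag, argument, action); the names are borrowed from `cli/alert.go` and the import path matches the one used elsewhere in this patch, but this is an illustration, not code from the patch itself.

```go
package main

import (
	"fmt"
	"os"

	"github.com/alecthomas/kingpin"
)

var (
	// Sketch of the package-level declarations used in the cli package above.
	app      = kingpin.New("amtool-sketch", "Condensed sketch of the amtool command wiring.")
	alertCmd = app.Command("alert", "View and search through current alerts")
	queryCmd = alertCmd.Command("query", "View and search through current alerts").Default()
	expired  = queryCmd.Flag("expired", "Show expired alerts as well as active").Bool()
	matchers = queryCmd.Arg("matcher-groups", "Query filter").Strings()
)

// The Action callback signature mirrors the one used throughout the cli package.
func queryAlerts(element *kingpin.ParseElement, ctx *kingpin.ParseContext) error {
	fmt.Printf("expired=%v matchers=%v\n", *expired, *matchers)
	return nil
}

func main() {
	// Register the callback, then parse; parse errors are reported the same
	// way cli/root.go does.
	queryCmd.Action(queryAlerts)
	if _, err := app.Parse(os.Args[1:]); err != nil {
		kingpin.Fatalf("%v\n", err)
	}
}
```

Running the sketch as `amtool-sketch alert job=node` should fall through to the default `query` subcommand, just as `amtool alert` does after this change.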
+ +## Features + +- Help output that isn't as ugly as sin. +- Fully [customisable help](#custom-help), via Go templates. +- Parsed, type-safe flags (`kingpin.Flag("f", "help").Int()`) +- Parsed, type-safe positional arguments (`kingpin.Arg("a", "help").Int()`). +- Parsed, type-safe, arbitrarily deep commands (`kingpin.Command("c", "help")`). +- Support for required flags and required positional arguments (`kingpin.Flag("f", "").Required().Int()`). +- Support for arbitrarily nested default commands (`command.Default()`). +- Callbacks per command, flag and argument (`kingpin.Command("c", "").Action(myAction)`). +- POSIX-style short flag combining (`-a -b` -> `-ab`). +- Short-flag+parameter combining (`-a parm` -> `-aparm`). +- Read command-line from files (`@`). +- Automatically generate man pages (`--help-man`). +- Negatable boolean flags (`--[no-]flag`). + +## User-visible changes between v2 and v3 + +### Some flag types have been removed. + +Some flag types had unintended side-effects, or poor usability. For example, +flags that created/opened files could result in file-descriptor leaks. To avoid +confusion these have been removed. + +### Semantics around boolean flags have changed. + +`Bool()` now creates a non-negatable flag. + +Use `NegatableBool()` to add a boolean flag that supports both `--flag` and +`--no-flag`. This will be displayed in the help. + +## User-visible changes between v1 and v2 + +### Flags can be used at any point after their definition. + +Flags can be specified at any point after their definition, not just +*immediately after their associated command*. From the chat example below, the +following used to be required: + +``` +$ chat --server=chat.server.com:8080 post --image=~/Downloads/owls.jpg pics +``` + +But the following will now work: + +``` +$ chat post --server=chat.server.com:8080 --image=~/Downloads/owls.jpg pics +``` + +### Short flags can be combined with their parameters + +Previously, if a short flag was used, any argument to that flag would have to +be separated by a space. That is no longer the case. + +## API changes between v1 and v2 + +- `ParseWithFileExpansion()` is gone. The new parser directly supports expanding `@`. +- Added `FatalUsage()` and `FatalUsageContext()` for displaying an error + usage and terminating. +- `Dispatch()` renamed to `Action()`. +- Added `ParseContext()` for parsing a command line into its intermediate context form without executing. +- Added `Terminate()` function to override the termination function. +- Added `UsageForContextWithTemplate()` for printing usage via a custom template. +- Added `UsageTemplate()` for overriding the default template to use. Two templates are included: + 1. `DefaultUsageTemplate` - default template. + 2. `CompactUsageTemplate` - compact command template for larger applications. + +## Versions + +Kingpin uses [gopkg.in](https://gopkg.in/alecthomas/kingpin) for versioning. + +The current stable version is [gopkg.in/alecthomas/kingpin.v2](https://gopkg.in/alecthomas/kingpin.v2). The previous version, [gopkg.in/alecthomas/kingpin.v1](https://gopkg.in/alecthomas/kingpin.v1), is deprecated and in maintenance mode. + +### [V2](https://gopkg.in/alecthomas/kingpin.v2) is the current stable version + +Installation: + +```sh +$ go get gopkg.in/alecthomas/kingpin.v2 +``` + +### [V1](https://gopkg.in/alecthomas/kingpin.v1) is the OLD stable version + +Installation: + +```sh +$ go get gopkg.in/alecthomas/kingpin.v1 +``` + +## Change History + +- *2015-09-19* -- Stable v2.1.0 release. 
+ - Added `command.Default()` to specify a default command to use if no other + command matches. This allows for convenient user shortcuts. + - Exposed `HelpFlag` and `VersionFlag` for further customisation. + - `Action()` and `PreAction()` added and both now support an arbitrary + number of callbacks. + - `kingpin.SeparateOptionalFlagsUsageTemplate`. + - `--help-long` and `--help-man` (hidden by default) flags. + - Flags are "interspersed" by default, but can be disabled with `app.Interspersed(false)`. + - Added flags for all simple builtin types (int8, uint16, etc.) and slice variants. + - Use `app.Writer(os.Writer)` to specify the default writer for all output functions. + - Dropped `os.Writer` prefix from all printf-like functions. + +- *2015-05-22* -- Stable v2.0.0 release. + - Initial stable release of v2.0.0. + - Fully supports interspersed flags, commands and arguments. + - Flags can be present at any point after their logical definition. + - Application.Parse() terminates if commands are present and a command is not parsed. + - Dispatch() -> Action(). + - Actions are dispatched after all values are populated. + - Override termination function (defaults to os.Exit). + - Override output stream (defaults to os.Stderr). + - Templatised usage help, with default and compact templates. + - Make error/usage functions more consistent. + - Support argument expansion from files by default (with @). + - Fully public data model is available via .Model(). + - Parser has been completely refactored. + - Parsing and execution has been split into distinct stages. + - Use `go generate` to generate repeated flags. + - Support combined short-flag+argument: -fARG. + +- *2015-01-23* -- Stable v1.3.4 release. + - Support "--" for separating flags from positional arguments. + - Support loading flags from files (ParseWithFileExpansion()). Use @FILE as an argument. + - Add post-app and post-cmd validation hooks. This allows arbitrary validation to be added. + - A bunch of improvements to help usage and formatting. + - Support arbitrarily nested sub-commands. + +- *2014-07-08* -- Stable v1.2.0 release. + - Pass any value through to `Strings()` when final argument. + Allows for values that look like flags to be processed. + - Allow `--help` to be used with commands. + - Support `Hidden()` flags. + - Parser for [units.Base2Bytes](https://github.com/alecthomas/units) + type. Allows for flags like `--ram=512MB` or `--ram=1GB`. + - Add an `Enum()` value, allowing only one of a set of values + to be selected. eg. `Flag(...).Enum("debug", "info", "warning")`. + +- *2014-06-27* -- Stable v1.1.0 release. + - Bug fixes. + - Always return an error (rather than panicing) when misconfigured. + - `OpenFile(flag, perm)` value type added, for finer control over opening files. + - Significantly improved usage formatting. + +- *2014-06-19* -- Stable v1.0.0 release. + - Support [cumulative positional](#consuming-all-remaining-arguments) arguments. + - Return error rather than panic when there are fatal errors not caught by + the type system. eg. when a default value is invalid. + - Use gokpg.in. + +- *2014-06-10* -- Place-holder streamlining. + - Renamed `MetaVar` to `PlaceHolder`. + - Removed `MetaVarFromDefault`. Kingpin now uses [heuristics](#place-holders-in-help) + to determine what to display. + +## Examples + +### Simple Example + +Kingpin can be used for simple flag+arg applications like so: + +``` +$ ping --help +usage: ping [] [] + +Flags: + --debug Enable debug mode. + --help Show help. 
+ -t, --timeout=5s Timeout waiting for ping. + +Args: + IP address to ping. + [] Number of packets to send +$ ping 1.2.3.4 5 +Would ping: 1.2.3.4 with timeout 5s and count 0 +``` + +From the following source: + +```go +package main + +import ( + "fmt" + + "gopkg.in/alecthomas/kingpin.v2" +) + +var ( + debug = kingpin.Flag("debug", "Enable debug mode.").Bool() + timeout = kingpin.Flag("timeout", "Timeout waiting for ping.").Default("5s").OverrideDefaultFromEnvar("PING_TIMEOUT").Short('t').Duration() + ip = kingpin.Arg("ip", "IP address to ping.").Required().IP() + count = kingpin.Arg("count", "Number of packets to send").Int() +) + +func main() { + kingpin.Version("0.0.1") + kingpin.Parse() + fmt.Printf("Would ping: %s with timeout %s and count %d", *ip, *timeout, *count) +} +``` + +### Complex Example + +Kingpin can also produce complex command-line applications with global flags, +subcommands, and per-subcommand flags, like this: + +``` +$ chat --help +usage: chat [] [] [ ...] + +A command-line chat application. + +Flags: + --help Show help. + --debug Enable debug mode. + --server=127.0.0.1 Server address. + +Commands: + help [] + Show help for a command. + + register + Register a new user. + + post [] [] + Post a message to a channel. + +$ chat help post +usage: chat [] post [] [] + +Post a message to a channel. + +Flags: + --image=IMAGE Image to post. + +Args: + Channel to post to. + [] Text to post. + +$ chat post --image=~/Downloads/owls.jpg pics +... +``` + +From this code: + +```go +package main + +import ( + "os" + "strings" + "gopkg.in/alecthomas/kingpin.v2" +) + +var ( + app = kingpin.New("chat", "A command-line chat application.") + debug = app.Flag("debug", "Enable debug mode.").Bool() + serverIP = app.Flag("server", "Server address.").Default("127.0.0.1").IP() + + register = app.Command("register", "Register a new user.") + registerNick = register.Arg("nick", "Nickname for user.").Required().String() + registerName = register.Arg("name", "Name of user.").Required().String() + + post = app.Command("post", "Post a message to a channel.") + postImage = post.Flag("image", "Image to post.").File() + postChannel = post.Arg("channel", "Channel to post to.").Required().String() + postText = post.Arg("text", "Text to post.").Strings() +) + +func main() { + switch kingpin.MustParse(app.Parse(os.Args[1:])) { + // Register user + case register.FullCommand(): + println(*registerNick) + + // Post message + case post.FullCommand(): + if *postImage != nil { + } + text := strings.Join(*postText, " ") + println("Post:", text) + } +} +``` + +## Reference Documentation + +### Displaying errors and usage information + +Kingpin exports a set of functions to provide consistent errors and usage +information to the user. + +Error messages look something like this: + + : error: + +The functions on `Application` are: + +Function | Purpose +---------|-------------- +`Errorf(format, args)` | Display a printf formatted error to the user. +`Fatalf(format, args)` | As with Errorf, but also call the termination handler. +`FatalUsage(format, args)` | As with Fatalf, but also print contextual usage information. +`FatalUsageContext(context, format, args)` | As with Fatalf, but also print contextual usage information from a `ParseContext`. +`FatalIfError(err, format, args)` | Conditionally print an error prefixed with format+args, then call the termination handler + +There are equivalent global functions in the kingpin namespace for the default +`kingpin.CommandLine` instance. 
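As a rough illustration of how these helpers fit together, here is a minimal, hypothetical sketch that reuses the `app.Parse` plus `kingpin.Fatalf` error handling seen in `cli/root.go` above; the `demo` tool and its empty-file check are invented for the example and are not part of the library or the patch.

```go
package main

import (
	"os"

	"github.com/alecthomas/kingpin"
)

var (
	app = kingpin.New("demo", "Hypothetical tool illustrating the error helpers.")
	src = app.Arg("src", "File to read").Required().String()
)

func main() {
	// Report parse errors the same way cli/root.go does.
	if _, err := app.Parse(os.Args[1:]); err != nil {
		kingpin.Fatalf("%v\n", err)
	}

	f, err := os.Open(*src)
	// FatalIfError prefixes err with the formatted message and then calls
	// the termination handler (os.Exit by default) when err is non-nil.
	app.FatalIfError(err, "could not open %q", *src)
	defer f.Close()

	if fi, err := f.Stat(); err == nil && fi.Size() == 0 {
		// Fatalf prints a "demo: error: ..." message and terminates.
		app.Fatalf("%s is empty", *src)
	}
}
```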
+ +### Sub-commands + +Kingpin supports nested sub-commands, with separate flag and positional +arguments per sub-command. Note that positional arguments may only occur after +sub-commands. + +For example: + +```go +var ( + deleteCommand = kingpin.Command("delete", "Delete an object.") + deleteUserCommand = deleteCommand.Command("user", "Delete a user.") + deleteUserUIDFlag = deleteUserCommand.Flag("uid", "Delete user by UID rather than username.") + deleteUserUsername = deleteUserCommand.Arg("username", "Username to delete.") + deletePostCommand = deleteCommand.Command("post", "Delete a post.") +) + +func main() { + switch kingpin.Parse() { + case "delete user": + case "delete post": + } +} +``` + +### Custom Parsers + +Kingpin supports both flag and positional argument parsers for converting to +Go types. For example, some included parsers are `Int()`, `Float()`, +`Duration()` and `ExistingFile()`. + +Parsers conform to Go's [`flag.Value`](http://godoc.org/flag#Value) +interface, so any existing implementations will work. + +For example, a parser for accumulating HTTP header values might look like this: + +```go +type HTTPHeaderValue http.Header + +func (h *HTTPHeaderValue) Set(value string) error { + parts := strings.SplitN(value, ":", 2) + if len(parts) != 2 { + return fmt.Errorf("expected HEADER:VALUE got '%s'", value) + } + (*http.Header)(h).Add(parts[0], parts[1]) + return nil +} + +func (h *HTTPHeaderValue) String() string { + return "" +} +``` + +As a convenience, I would recommend something like this: + +```go +func HTTPHeader(s Settings) (target *http.Header) { + target = &http.Header{} + s.SetValue((*HTTPHeaderValue)(target)) + return +} +``` + +You would use it like so: + +```go +headers = HTTPHeader(kingpin.Flag("header", "Add a HTTP header to the request.").Short('H')) +``` + +### Repeatable flags + +Depending on the `Value` they hold, some flags may be repeated. The +`IsCumulative() bool` function on `Value` tells if it's safe to call `Set()` +multiple times or if an error should be raised if several values are passed. + +The built-in `Value`s returning slices and maps, as well as `Counter` are +examples of `Value`s that make a flag repeatable. + +### Boolean values + +Boolean values are uniquely managed by Kingpin. Each boolean flag will have a negative complement: +`--` and `--no-`. + +### Default Values + +The default value is the zero value for a type. This can be overridden with +the `Default(value...)` function on flags and arguments. This function accepts +one or several strings, which are parsed by the value itself, so they *must* +be compliant with the format expected. + +### Place-holders in Help + +The place-holder value for a flag is the value used in the help to describe +the value of a non-boolean flag. + +The value provided to PlaceHolder() is used if provided, then the value +provided by Default() if provided, then finally the capitalised flag name is +used. + +Here are some examples of flags with various permutations: + + --name=NAME // Flag(...).String() + --name="Harry" // Flag(...).Default("Harry").String() + --name=FULL-NAME // flag(...).PlaceHolder("FULL-NAME").Default("Harry").String() + +### Consuming all remaining arguments + +A common command-line idiom is to use all remaining arguments for some +purpose. eg. The following command accepts an arbitrary number of +IP addresses as positional arguments: + + ./cmd ping 10.1.1.1 192.168.1.1 + +Such arguments are similar to [repeatable flags](#repeatable-flags), but for +arguments. 
Therefore they use the same `IsCumulative() bool` function on the +underlying `Value`, so the built-in `Value`s for which the `Set()` function +can be called several times will consume multiple arguments. + +To implement the above example with a custom `Value`, we might do something +like this: + +```go +type ipList []net.IP + +func (i *ipList) Set(value string) error { + if ip := net.ParseIP(value); ip == nil { + return fmt.Errorf("'%s' is not an IP address", value) + } else { + *i = append(*i, ip) + return nil + } +} + +func (i *ipList) String() string { + return "" +} + +func (i *ipList) IsCumulative() bool { + return true +} + +func IPList(s Settings) (target *[]net.IP) { + target = new([]net.IP) + s.SetValue((*ipList)(target)) + return +} +``` + +And use it like so: + +```go +ips := IPList(kingpin.Arg("ips", "IP addresses to ping.")) +``` + +### Configuration files and other external sources of flag/argument values + +Kingpin v3 now supports +[custom value resolvers](https://godoc.org/gopkg.in/alecthomas/kingpin.v3-unstable#Resolver) +for flags and arguments. This is used internally to resolve default values and environment variables, but +applications can define their own resolvers to load flags from configuration files, or elsewhere. + +Included is a [JSONResolver](https://godoc.org/gopkg.in/alecthomas/kingpin.v3-unstable#JSONResolver) that +loads values from a JSON file, including multi-value flags. + +### Struct tag interpolation + +Kingpin v3 now supports defining flags, arguments and commands via +struct reflection. If desired, this can (almost) completely replace +the fluent-style interface. + +The name of the flag will default to the CamelCase name transformed to camel- +case. This can be overridden with the "long" tag. + +All basic Go types are supported including floats, ints, strings, +time.Duration, and slices of same. + +For compatibility, also supports the tags used by https://github.com/jessevdk/go-flags + +```go +type MyFlags struct { + Arg string `arg:"true" help:"An argument"` + Debug bool `help:"Enable debug mode."` + URL string `help:"URL to connect to." default:"localhost:80"` + + Login struct { + Name string `help:"Username to authenticate with." args:"true"` + } `help:"Login to server."` +} + +flags := &MyFlags{} +kingpin.Struct(flags) +``` + +Supported struct tags are: + +| Tag | Description | +| --------------- | ------------------------------------------- | +| `help` | Help text. | +| `placeholder` | Placeholder text. | +| `default` | Default value. | +| `short` | Short name, if flag. | +| `long` | Long name, for overriding field name. | +| `required` | If present, flag/arg is required. | +| `hidden` | If present, flag/arg/command is hidden. | +| `enum` | For enums, a comma separated list of cases. | +| `arg` | If true, field is an argument. | + +### Bash/ZSH Shell Completion + +By default, all flags and commands/subcommands generate completions +internally. + +Out of the box, CLI tools using kingpin should be able to take advantage +of completion hinting for flags and commands. By specifying +`--completion-bash` as the first argument, your CLI tool will show +possible subcommands. By ending your argv with `--`, hints for flags +will be shown. + +To allow your end users to take advantage you must package a +`/etc/bash_completion.d` script with your distribution (or the equivalent +for your target platform/shell). An alternative is to instruct your end +user to source a script from their `bash_profile` (or equivalent). 
+ +Fortunately Kingpin makes it easy to generate or source a script for use +with end users shells. `./yourtool --completion-script-bash` and +`./yourtool --completion-script-zsh` will generate these scripts for you. + +**Installation by Package** + +For the best user experience, you should bundle your pre-created +completion script with your CLI tool and install it inside +`/etc/bash_completion.d` (or equivalent). A good suggestion is to add +this as an automated step to your build pipeline, in the implementation +is improved for bug fixed. + +**Installation by `bash_profile`** + +Alternatively, instruct your users to add an additional statement to +their `bash_profile` (or equivalent): + +``` +eval "$(your-cli-tool --completion-script-bash)" +``` + +Or for ZSH + +``` +eval "$(your-cli-tool --completion-script-zsh)" +``` + +#### Additional API +To provide more flexibility, a completion option API has been +exposed for flags to allow user defined completion options, to extend +completions further than just EnumVar/Enum. + + +**Provide Static Options** + +When using an `Enum` or `EnumVar`, users are limited to only the options +given. Maybe we wish to hint possible options to the user, but also +allow them to provide their own custom option. `HintOptions` gives +this functionality to flags. + +``` +app := kingpin.New("completion", "My application with bash completion.") +app.Flag("port", "Provide a port to connect to"). + Required(). + HintOptions("80", "443", "8080"). + IntVar(&c.port) +``` + +**Provide Dynamic Options** + +Consider the case that you needed to read a local database or a file to +provide suggestions. You can dynamically generate the options + +``` +func listHosts(args []string) []string { + // Provide a dynamic list of hosts from a hosts file or otherwise + // for bash completion. In this example we simply return static slice. + + // You could use this functionality to reach into a hosts file to provide + // completion for a list of known hosts. + return []string{"sshhost.example", "webhost.example", "ftphost.example"} +} + +app := kingpin.New("completion", "My application with bash completion.") +app.Flag("flag-1", "").HintAction(listHosts).String() +``` + +**EnumVar/Enum** + +When using `Enum` or `EnumVar`, any provided options will be automatically +used for bash autocompletion. However, if you wish to provide a subset or +different options, you can use `HintOptions` or `HintAction` which will override +the default completion options for `Enum`/`EnumVar`. + + +**Examples** + +You can see an in depth example of the completion API within +`examples/completion/main.go` + + +### Supporting -h for help + +`kingpin.CommandLine.GetFlag("help").Short('h')` + +### Custom help + +Kingpin supports templatised help using the text/template library. + +You can specify the template to use with the [Application.UsageTemplate()](http://godoc.org/gopkg.in/alecthomas/kingpin.v2#Application.UsageTemplate) function. + +There are four included templates: `kingpin.DefaultUsageTemplate` is the +default, `kingpin.CompactUsageTemplate` provides a more compact representation +for more complex command-line structures, and `kingpin.ManPageTemplate` is +used to generate man pages. + +See the above templates for examples of usage, and the the function [UsageForContextWithTemplate()](https://github.com/alecthomas/kingpin/blob/master/usage.go#L198) method for details on the context. + +#### Adding a new help command + +It is often useful to add extra help formats, such as man pages, etc. 
Here's how you'd add a `--help-man` flag: + +```go +kingpin.Flag("help-man", "Generate man page."). + UsageActionTemplate(kingpin.ManPageTemplate). + Bool() +``` + +#### Default help template + +``` +$ go run ./examples/curl/curl.go --help +usage: curl [] [ ...] + +An example implementation of curl. + +Flags: + --help Show help. + -t, --timeout=5s Set connection timeout. + -H, --headers=HEADER=VALUE + Add HTTP headers to the request. + +Commands: + help [...] + Show help. + + get url + Retrieve a URL. + + get file + Retrieve a file. + + post [] + POST a resource. +``` + +#### Compact help template + +``` +$ go run ./examples/curl/curl.go --help +usage: curl [] [ ...] + +An example implementation of curl. + +Flags: + --help Show help. + -t, --timeout=5s Set connection timeout. + -H, --headers=HEADER=VALUE + Add HTTP headers to the request. + +Commands: + help [...] + get [] + url + file + post [] +``` diff --git a/vendor/github.com/alecthomas/kingpin/actions.go b/vendor/github.com/alecthomas/kingpin/actions.go new file mode 100644 index 0000000000..98d041f01a --- /dev/null +++ b/vendor/github.com/alecthomas/kingpin/actions.go @@ -0,0 +1,45 @@ +package kingpin + +// Action callback triggered during parsing. +// +// "element" is the flag, argument or command associated with the callback. It contains the Clause +// and string value, if any. +// +// "context" contains the full parse context, including all other elements that have been parsed. +type Action func(element *ParseElement, context *ParseContext) error + +type actionApplier interface { + applyActions(*ParseElement, *ParseContext) error + applyPreActions(*ParseElement, *ParseContext) error +} + +type actionMixin struct { + actions []Action + preActions []Action +} + +func (a *actionMixin) addAction(action Action) { + a.actions = append(a.actions, action) +} + +func (a *actionMixin) addPreAction(action Action) { + a.actions = append(a.actions, action) +} + +func (a *actionMixin) applyActions(element *ParseElement, context *ParseContext) error { + for _, action := range a.actions { + if err := action(element, context); err != nil { + return err + } + } + return nil +} + +func (a *actionMixin) applyPreActions(element *ParseElement, context *ParseContext) error { + for _, preAction := range a.preActions { + if err := preAction(element, context); err != nil { + return err + } + } + return nil +} diff --git a/vendor/github.com/alecthomas/kingpin/app.go b/vendor/github.com/alecthomas/kingpin/app.go new file mode 100644 index 0000000000..3fdbd74e9f --- /dev/null +++ b/vendor/github.com/alecthomas/kingpin/app.go @@ -0,0 +1,678 @@ +package kingpin + +import ( + "fmt" + "io" + "os" + "strings" +) + +var ( + errCommandNotSpecified = TError("command not specified") +) + +// An Application contains the definitions of flags, arguments and commands +// for an application. +type Application struct { + cmdMixin + initialized bool + + Name string + Help string + + author string + version string + output io.Writer // Destination for usage. + errors io.Writer + terminate func(status int) // See Terminate() + noInterspersed bool // can flags be interspersed with args (or must they come first) + envarSeparator string + defaultEnvars bool + resolvers []Resolver + completion bool + helpFlag *Clause + helpCommand *CmdClause + defaultUsage *UsageContext +} + +// New creates a new Kingpin application instance. 
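+//
+// By default the returned Application writes usage to os.Stdout and errors to
+// os.Stderr, and terminates via os.Exit; these defaults can be replaced with
+// Writers() and Terminate().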
+func New(name, help string) *Application { + a := &Application{ + Name: name, + Help: help, + output: os.Stdout, + errors: os.Stderr, + terminate: os.Exit, + envarSeparator: string(os.PathListSeparator), + defaultUsage: &UsageContext{ + Template: DefaultUsageTemplate, + }, + } + a.flagGroup = newFlagGroup() + a.argGroup = newArgGroup() + a.cmdGroup = newCmdGroup(a) + a.helpFlag = a.Flag("help", T("Show context-sensitive help.")).Action(func(e *ParseElement, c *ParseContext) error { + c.Application.UsageForContext(c) + c.Application.terminate(0) + return nil + }) + a.helpFlag.Bool() + a.Flag("completion-bash", T("Output possible completions for the given args.")).Hidden().BoolVar(&a.completion) + a.Flag("completion-script-bash", T("Generate completion script for bash.")).Hidden().PreAction(a.generateBashCompletionScript).Bool() + a.Flag("completion-script-zsh", T("Generate completion script for ZSH.")).Hidden().PreAction(a.generateZSHCompletionScript).Bool() + + return a +} + +// Struct allows applications to define flags with struct tags. +// +// Supported struct tags are: help, placeholder, default, short, long, required, hidden, env, +// enum, and arg. +// +// The name of the flag will default to the CamelCase name transformed to camel-case. This can +// be overridden with the "long" tag. +// +// All basic Go types are supported including floats, ints, strings, time.Duration, +// and slices of same. +// +// For compatibility, also supports the tags used by https://github.com/jessevdk/go-flags +func (a *Application) Struct(v interface{}) error { + return a.fromStruct(nil, v) +} + +func (a *Application) generateBashCompletionScript(e *ParseElement, c *ParseContext) error { + usageContext := &UsageContext{ + Template: BashCompletionTemplate, + } + a.Writers(os.Stdout, os.Stderr) + if err := a.UsageForContextWithTemplate(usageContext, c); err != nil { + return err + } + a.terminate(0) + return nil +} + +func (a *Application) generateZSHCompletionScript(e *ParseElement, c *ParseContext) error { + usageContext := &UsageContext{ + Template: ZshCompletionTemplate, + } + a.Writers(os.Stdout, os.Stderr) + if err := a.UsageForContextWithTemplate(usageContext, c); err != nil { + return err + } + a.terminate(0) + return nil +} + +// Action is an application-wide callback. It is used in two situations: first, with a nil "element" +// parameter when parsing is complete, and second whenever a command, argument or flag is +// encountered. +func (a *Application) Action(action Action) *Application { + a.addAction(action) + return a +} + +// PreAction is an application-wide callback. It is in two situations: first, with a nil "element" +// parameter, and second, whenever a command, argument or flag is encountered. +func (a *Application) PreAction(action Action) *Application { + a.addPreAction(action) + return a +} + +// DefaultEnvars configures all flags (that do not already have an associated +// envar) to use a default environment variable in the form "_". +// +// For example, if the application is named "foo" and a flag is named "bar- +// waz" the environment variable: "FOO_BAR_WAZ". +func (a *Application) DefaultEnvars() *Application { + a.defaultEnvars = true + return a +} + +// EnvarSeparator sets the string that is used for separating values in environment variables. +// +// This defaults to the current OS's path list separator (typically : or ;). 
+func (a *Application) EnvarSeparator(sep string) *Application { + a.envarSeparator = sep + return a + +} + +// Resolver adds an ordered set of flag/argument resolvers. +// +// Resolvers provide default flag/argument values, from environment variables, configuration files, etc. Multiple +// resolvers may be added, and they are processed in order. +// +// The last Resolver to return a value always wins. Values returned from resolvers are not cumulative. +func (a *Application) Resolver(resolvers ...Resolver) *Application { + a.resolvers = append(a.resolvers, resolvers...) + return a +} + +// Terminate specifies the termination handler. Defaults to os.Exit(status). +// If nil is passed, a no-op function will be used. +func (a *Application) Terminate(terminate func(int)) *Application { + if terminate == nil { + terminate = func(int) {} + } + a.terminate = terminate + return a +} + +// Writers specifies the writers to use for usage and errors. Defaults to os.Stderr. +func (a *Application) Writers(out, err io.Writer) *Application { + a.output = out + a.errors = err + return a +} + +// UsageTemplate specifies the text template to use when displaying usage +// information via --help. The default is DefaultUsageTemplate. +func (a *Application) UsageTemplate(template string) *Application { + a.defaultUsage.Template = template + return a +} + +// UsageContext specifies the UsageContext to use when displaying usage +// information via --help. +func (a *Application) UsageContext(context *UsageContext) *Application { + a.defaultUsage = context + return a +} + +// ParseContext parses the given command line and returns the fully populated +// ParseContext. +func (a *Application) ParseContext(args []string) (*ParseContext, error) { + return a.parseContext(false, args) +} + +func (a *Application) parseContext(ignoreDefault bool, args []string) (*ParseContext, error) { + if err := a.init(); err != nil { + return nil, err + } + context := tokenize(args, ignoreDefault, a.buildResolvers()) + context.Application = a + err := parse(context, a) + return context, err +} + +// Build resolvers to emulate the envar and defaults behaviour that was previously hard-coded. +func (a *Application) buildResolvers() []Resolver { + + // .Default() has lowest priority... + resolvers := []Resolver{defaultsResolver()} + // Then custom resolvers... + resolvers = append(resolvers, a.resolvers...) + // Finally, envars are highest priority behind direct flag parsing. + if a.defaultEnvars { + resolvers = append(resolvers, PrefixedEnvarResolver(a.Name+"_", a.envarSeparator)) + } + resolvers = append(resolvers, envarResolver(a.envarSeparator)) + + return resolvers +} + +// Parse parses command-line arguments. It returns the selected command and an +// error. The selected command will be a space separated subcommand, if +// subcommands have been configured. +// +// This will populate all flag and argument values, call all callbacks, and so +// on. +func (a *Application) Parse(args []string) (command string, err error) { + context, parseErr := a.ParseContext(args) + if context == nil { + // Since we do not throw error immediately, there could be a case + // where a context returns nil. Protect against that. 
+ return "", parseErr + } + + if err = a.setDefaults(context); err != nil { + return "", err + } + + selected, setValuesErr := a.setValues(context) + + if err = a.applyPreActions(context, !a.completion); err != nil { + return "", err + } + + if a.completion { + a.generateBashCompletion(context) + a.terminate(0) + } else { + if parseErr != nil { + return "", parseErr + } + + a.maybeHelp(context) + if !context.EOL() { + return "", TError("unexpected argument '{{.Arg0}}'", V{"Arg0": context.Peek()}) + } + + if setValuesErr != nil { + return "", setValuesErr + } + + command, err = a.execute(context, selected) + if err == errCommandNotSpecified { + a.writeUsage(context, nil) + } + } + return command, err +} + +func (a *Application) writeUsage(context *ParseContext, err error) { + if err != nil { + a.Errorf("%s", err) + } + if err := a.UsageForContext(context); err != nil { + panic(err) + } + a.terminate(1) +} + +func (a *Application) maybeHelp(context *ParseContext) { + for _, element := range context.Elements { + if element.OneOf.Flag == a.helpFlag { + // Re-parse the command-line ignoring defaults, so that help works correctly. + context, _ = a.parseContext(true, context.rawArgs) + a.writeUsage(context, nil) + } + } +} + +// Version adds a --version flag for displaying the application version. +func (a *Application) Version(version string) *Application { + a.version = version + a.Flag("version", T("Show application version.")). + PreAction(func(*ParseElement, *ParseContext) error { + fmt.Fprintln(a.output, version) + a.terminate(0) + return nil + }). + Bool() + return a +} + +// Author sets the author name for usage templates. +func (a *Application) Author(author string) *Application { + a.author = author + return a +} + +// Command adds a new top-level command. +func (a *Application) Command(name, help string) *CmdClause { + return a.addCommand(name, help) +} + +// Interspersed control if flags can be interspersed with positional arguments +// +// true (the default) means that they can, false means that all the flags must appear before the first positional arguments. +func (a *Application) Interspersed(interspersed bool) *Application { + a.noInterspersed = !interspersed + return a +} + +func (a *Application) init() error { + if a.initialized { + return nil + } + if err := a.checkArgCommandMixing(); err != nil { + return err + } + + // If we have subcommands, add a help command at the top-level. + if a.cmdGroup.have() { + var command []string + a.helpCommand = a.Command("help", T("Show help.")). + PreAction(func(element *ParseElement, context *ParseContext) error { + a.Usage(command) + command = []string{} + a.terminate(0) + return nil + }) + a.helpCommand. + Arg("command", T("Show help on command.")). + StringsVar(&command) + // Make help first command. + l := len(a.commandOrder) + a.commandOrder = append(a.commandOrder[l-1:l], a.commandOrder[:l-1]...) + } + + if err := a.flagGroup.init(); err != nil { + return err + } + if err := a.cmdGroup.init(); err != nil { + return err + } + if err := a.argGroup.init(); err != nil { + return err + } + for _, cmd := range a.commands { + if err := cmd.init(); err != nil { + return err + } + } + flagGroups := []*flagGroup{a.flagGroup} + for _, cmd := range a.commandOrder { + if err := checkDuplicateFlags(cmd, flagGroups); err != nil { + return err + } + } + a.initialized = true + return nil +} + +// Recursively check commands for duplicate flags. +func checkDuplicateFlags(current *CmdClause, flagGroups []*flagGroup) error { + // Check for duplicates. 
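+ // flagGroups carries the flag groups of every ancestor scope (the
+ // application itself and any parent commands), so a flag declared on the
+ // current command must not collide with a flag defined further up the tree.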
+ for _, flags := range flagGroups { + for _, flag := range current.flagOrder { + if flag.shorthand != 0 { + if _, ok := flags.short[string(flag.shorthand)]; ok { + return TError("duplicate short flag -{{.Arg0}}", V{"Arg0": flag.shorthand}) + } + } + if _, ok := flags.long[flag.name]; ok { + return TError("duplicate long flag --{{.Arg0}}", V{"Arg0": flag.name}) + } + } + } + flagGroups = append(flagGroups, current.flagGroup) + // Check subcommands. + for _, subcmd := range current.commandOrder { + if err := checkDuplicateFlags(subcmd, flagGroups); err != nil { + return err + } + } + return nil +} + +func (a *Application) execute(context *ParseContext, selected []string) (string, error) { + var err error + + if err = a.validateRequired(context); err != nil { + return "", err + } + + if err = a.applyActions(context); err != nil { + return "", err + } + + command := strings.Join(selected, " ") + if command == "" && a.cmdGroup.have() { + return "", errCommandNotSpecified + } + return command, err +} + +func (a *Application) setDefaults(context *ParseContext) error { + flagElements := context.Elements.FlagMap() + argElements := context.Elements.ArgMap() + + // Check required flags and set defaults. + for _, flag := range context.flags.long { + if flagElements[flag.name] == nil { + if err := flag.setDefault(context); err != nil { + return err + } + } else { + flag.reset() + } + } + + for _, arg := range context.arguments.args { + if argElements[arg.name] == nil { + if err := arg.setDefault(context); err != nil { + return err + } + } else { + arg.reset() + } + } + + return nil +} + +func (a *Application) validateRequired(context *ParseContext) error { + flagElements := context.Elements.FlagMap() + argElements := context.Elements.ArgMap() + + // Check required flags and set defaults. + for _, flag := range context.flags.long { + if flagElements[flag.name] == nil { + // Check required flags were provided. + if flag.needsValue(context) { + return TError("required flag --{{.Arg0}} not provided", V{"Arg0": flag.name}) + } + } + } + + for _, arg := range context.arguments.args { + if argElements[arg.name] == nil { + if arg.needsValue(context) { + return TError("required argument '{{.Arg0}}' not provided", V{"Arg0": arg.name}) + } + } + } + return nil +} + +func (a *Application) setValues(context *ParseContext) (selected []string, err error) { + // Set all arg and flag values. 
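+ // Walk the parsed elements in command-line order: repeated flags are only
+ // accepted for cumulative values, and each command element encountered is
+ // appended to the selected command path.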
+ var ( + lastCmd *CmdClause + flagSet = map[string]struct{}{} + ) + for _, element := range context.Elements { + switch { + case element.OneOf.Flag != nil: + clause := element.OneOf.Flag + if _, ok := flagSet[clause.name]; ok { + if v, ok := clause.value.(cumulativeValue); !ok || !v.IsCumulative() { + return nil, TError("flag '{{.Arg0}}' cannot be repeated", V{"Arg0": clause.name}) + } + } + if err = clause.value.Set(*element.Value); err != nil { + return + } + flagSet[clause.name] = struct{}{} + + case element.OneOf.Arg != nil: + clause := element.OneOf.Arg + if err = clause.value.Set(*element.Value); err != nil { + return + } + + case element.OneOf.Cmd != nil: + clause := element.OneOf.Cmd + if clause.validator != nil { + if err = clause.validator(clause); err != nil { + return + } + } + selected = append(selected, clause.name) + lastCmd = clause + } + } + + if lastCmd == nil || lastCmd.optionalSubcommands { + return + } + if len(lastCmd.commands) > 0 { + return nil, TError("must select a subcommand of '{{.Arg0}}'", V{"Arg0": lastCmd.FullCommand()}) + } + + return +} + +// Errorf prints an error message to w in the format ": error: ". +func (a *Application) Errorf(format string, args ...interface{}) { + fmt.Fprintf(a.errors, a.Name+T(": error: ")+format+"\n", args...) +} + +// Fatalf writes a formatted error to w then terminates with exit status 1. +func (a *Application) Fatalf(format string, args ...interface{}) { + a.Errorf(format, args...) + a.terminate(1) +} + +// FatalUsage prints an error message followed by usage information, then +// exits with a non-zero status. +func (a *Application) FatalUsage(format string, args ...interface{}) { + a.Errorf(format, args...) + a.Usage([]string{}) + a.terminate(1) +} + +// FatalUsageContext writes a printf formatted error message to w, then usage +// information for the given ParseContext, before exiting. +func (a *Application) FatalUsageContext(context *ParseContext, format string, args ...interface{}) { + a.Errorf(format, args...) + if err := a.UsageForContext(context); err != nil { + panic(err) + } + a.terminate(1) +} + +// FatalIfError prints an error and exits if err is not nil. The error is printed +// with the given formatted string, if any. +func (a *Application) FatalIfError(err error, format string, args ...interface{}) { + if err != nil { + prefix := "" + if format != "" { + prefix = fmt.Sprintf(format, args...) + ": " + } + a.Errorf(prefix+"%s", err) + a.terminate(1) + } +} + +func (a *Application) completionOptions(context *ParseContext) []string { + args := context.rawArgs + + var ( + currArg string + prevArg string + target cmdMixin + ) + + numArgs := len(args) + if numArgs > 1 { + args = args[1:] + currArg = args[len(args)-1] + } + if numArgs > 2 { + prevArg = args[len(args)-2] + } + + target = a.cmdMixin + if context.SelectedCommand != nil { + // A subcommand was in use. We will use it as the target + target = context.SelectedCommand.cmdMixin + } + + if (currArg != "" && strings.HasPrefix(currArg, "--")) || strings.HasPrefix(prevArg, "--") { + // Perform completion for A flag. 
The last/current argument started with "-" + var ( + flagName string // The name of a flag if given (could be half complete) + flagValue string // The value assigned to a flag (if given) (could be half complete) + ) + + if strings.HasPrefix(prevArg, "--") && !strings.HasPrefix(currArg, "--") { + // Matches: ./myApp --flag value + // Wont Match: ./myApp --flag -- + flagName = prevArg[2:] // Strip the "--" + flagValue = currArg + } else if strings.HasPrefix(currArg, "--") { + // Matches: ./myApp --flag -- + // Matches: ./myApp --flag somevalue -- + // Matches: ./myApp -- + flagName = currArg[2:] // Strip the "--" + } + + options, flagMatched, valueMatched := target.FlagCompletion(flagName, flagValue) + if valueMatched { + // Value Matched. Show cmdCompletions + return target.CmdCompletion(context) + } + + // Add top level flags if we're not at the top level and no match was found. + if context.SelectedCommand != nil && !flagMatched { + topOptions, topFlagMatched, topValueMatched := a.FlagCompletion(flagName, flagValue) + if topValueMatched { + // Value Matched. Back to cmdCompletions + return target.CmdCompletion(context) + } + + if topFlagMatched { + // Top level had a flag which matched the input. Return it's options. + options = topOptions + } else { + // Add top level flags + options = append(options, topOptions...) + } + } + return options + } + + // Perform completion for sub commands and arguments. + return target.CmdCompletion(context) +} + +func (a *Application) generateBashCompletion(context *ParseContext) { + options := a.completionOptions(context) + fmt.Printf("%s", strings.Join(options, "\n")) +} + +func (a *Application) applyPreActions(context *ParseContext, dispatch bool) error { + if !dispatch { + return nil + } + if err := a.actionMixin.applyPreActions(nil, context); err != nil { + return err + } + for _, element := range context.Elements { + if err := a.actionMixin.applyPreActions(element, context); err != nil { + return err + } + var applier actionApplier + switch { + case element.OneOf.Arg != nil: + applier = element.OneOf.Arg + case element.OneOf.Flag != nil: + applier = element.OneOf.Flag + case element.OneOf.Cmd != nil: + applier = element.OneOf.Cmd + } + if err := applier.applyPreActions(element, context); err != nil { + return err + } + } + return nil +} + +func (a *Application) applyActions(context *ParseContext) error { + if err := a.actionMixin.applyActions(nil, context); err != nil { + return err + } + // Dispatch to actions. + for _, element := range context.Elements { + if err := a.actionMixin.applyActions(element, context); err != nil { + return err + } + var applier actionApplier + switch { + case element.OneOf.Arg != nil: + applier = element.OneOf.Arg + case element.OneOf.Flag != nil: + applier = element.OneOf.Flag + case element.OneOf.Cmd != nil: + applier = element.OneOf.Cmd + } + if err := applier.applyActions(element, context); err != nil { + return err + } + } + return nil +} diff --git a/vendor/github.com/alecthomas/kingpin/args.go b/vendor/github.com/alecthomas/kingpin/args.go new file mode 100644 index 0000000000..3846b480d0 --- /dev/null +++ b/vendor/github.com/alecthomas/kingpin/args.go @@ -0,0 +1,60 @@ +package kingpin + +type argGroup struct { + args []*Clause +} + +func newArgGroup() *argGroup { + return &argGroup{} +} + +func (a *argGroup) have() bool { + return len(a.args) > 0 +} + +// GetArg gets an argument definition. +// +// This allows existing arguments to be modified after definition but before parsing. 
Useful for +// modular applications. +func (a *argGroup) GetArg(name string) *Clause { + for _, arg := range a.args { + if arg.name == name { + return arg + } + } + return nil +} + +func (a *argGroup) Arg(name, help string) *Clause { + arg := NewClause(name, help) + a.args = append(a.args, arg) + return arg +} + +func (a *argGroup) init() error { + required := 0 + seen := map[string]struct{}{} + previousArgMustBeLast := false + for i, arg := range a.args { + if previousArgMustBeLast { + return TError("Args() can't be followed by another argument '{{.Arg0}}'", V{"Arg0": arg.name}) + } + if arg.consumesRemainder() { + previousArgMustBeLast = true + } + if _, ok := seen[arg.name]; ok { + return TError("duplicate argument '{{.Arg0}}'", V{"Arg0": arg.name}) + } + seen[arg.name] = struct{}{} + if arg.required && required != i { + return TError("required arguments found after non-required") + } + if arg.required { + required++ + } + if err := arg.init(); err != nil { + return err + } + } + return nil +} diff --git a/vendor/github.com/alecthomas/kingpin/camelcase.go b/vendor/github.com/alecthomas/kingpin/camelcase.go new file mode 100644 index 0000000000..5f7e513fb7 --- /dev/null +++ b/vendor/github.com/alecthomas/kingpin/camelcase.go @@ -0,0 +1,90 @@ +package kingpin + +// NOTE: This code is from https://github.com/fatih/camelcase. MIT license. + +import ( + "unicode" + "unicode/utf8" +) + +// Split splits the camelcase word and returns a list of words. It also +// supports digits. Both lower camel case and upper camel case are supported. +// For more info please check: http://en.wikipedia.org/wiki/CamelCase +// +// Examples +// +// "" => [""] +// "lowercase" => ["lowercase"] +// "Class" => ["Class"] +// "MyClass" => ["My", "Class"] +// "MyC" => ["My", "C"] +// "HTML" => ["HTML"] +// "PDFLoader" => ["PDF", "Loader"] +// "AString" => ["A", "String"] +// "SimpleXMLParser" => ["Simple", "XML", "Parser"] +// "vimRPCPlugin" => ["vim", "RPC", "Plugin"] +// "GL11Version" => ["GL", "11", "Version"] +// "99Bottles" => ["99", "Bottles"] +// "May5" => ["May", "5"] +// "BFG9000" => ["BFG", "9000"] +// "BöseÜberraschung" => ["Böse", "Überraschung"] +// "Two spaces" => ["Two", " ", "spaces"] +// "BadUTF8\xe2\xe2\xa1" => ["BadUTF8\xe2\xe2\xa1"] +// +// Splitting rules +// +// 1) If string is not valid UTF-8, return it without splitting as +// single item array. +// 2) Assign all unicode characters into one of 4 sets: lower case +// letters, upper case letters, numbers, and all other characters. +// 3) Iterate through characters of string, introducing splits +// between adjacent characters that belong to different sets. +// 4) Iterate through array of split strings, and if a given string +// is upper case: +// if subsequent string is lower case: +// move last character of upper case string to beginning of +// lower case string +func camelCase(src string) (entries []string) { + // don't split invalid utf8 + if !utf8.ValidString(src) { + return []string{src} + } + entries = []string{} + var runes [][]rune + lastClass := 0 + // split into fields based on class of unicode character + for _, r := range src { + var class int + switch true { + case unicode.IsLower(r): + class = 1 + case unicode.IsUpper(r): + class = 2 + case unicode.IsDigit(r): + class = 3 + default: + class = 4 + } + if class == lastClass { + runes[len(runes)-1] = append(runes[len(runes)-1], r) + } else { + runes = append(runes, []rune{r}) + } + lastClass = class + } + // handle upper case -> lower case sequences, e.g. 
+ // "PDFL", "oader" -> "PDF", "Loader" + for i := 0; i < len(runes)-1; i++ { + if unicode.IsUpper(runes[i][0]) && unicode.IsLower(runes[i+1][0]) { + runes[i+1] = append([]rune{runes[i][len(runes[i])-1]}, runes[i+1]...) + runes[i] = runes[i][:len(runes[i])-1] + } + } + // construct []string from results + for _, s := range runes { + if len(s) > 0 { + entries = append(entries, string(s)) + } + } + return +} diff --git a/vendor/github.com/alecthomas/kingpin/clause.go b/vendor/github.com/alecthomas/kingpin/clause.go new file mode 100644 index 0000000000..6c024caed3 --- /dev/null +++ b/vendor/github.com/alecthomas/kingpin/clause.go @@ -0,0 +1,331 @@ +package kingpin + +import ( + "net" + + "github.com/alecthomas/units" +) + +// A Clause represents a flag or an argument passed by the user. +type Clause struct { + actionMixin + completionsMixin + + name string + shorthand rune + help string + placeholder string + hidden bool + defaultValues []string + value Value + required bool + envar string + noEnvar bool +} + +func NewClause(name, help string) *Clause { + return &Clause{ + name: name, + help: help, + } +} + +func (c *Clause) consumesRemainder() bool { + if r, ok := c.value.(cumulativeValue); ok { + return r.IsCumulative() + } + return false +} + +func (c *Clause) init() error { + if c.required && len(c.defaultValues) > 0 { + return TError("required flag '--{{.Arg0}}' with default value that will never be used", V{"Arg0": c.name}) + } + if c.value == nil { + return TError("no type defined for --{{.Arg0}} (eg. .String())", V{"Arg0": c.name}) + } + if v, ok := c.value.(cumulativeValue); (!ok || !v.IsCumulative()) && len(c.defaultValues) > 1 { + return TError("invalid default for '--{{.Arg0}}', expecting single value", V{"Arg0": c.name}) + } + return nil +} + +func (c *Clause) Help(help string) *Clause { + c.help = help + return c +} + +// UsageAction adds a PreAction() that will display the given UsageContext. +func (c *Clause) UsageAction(context *UsageContext) *Clause { + c.PreAction(func(e *ParseElement, c *ParseContext) error { + c.Application.UsageForContextWithTemplate(context, c) + c.Application.terminate(0) + return nil + }) + return c +} + +func (c *Clause) UsageActionTemplate(template string) *Clause { + return c.UsageAction(&UsageContext{Template: template}) +} + +// Action adds a callback action to be executed after the command line is parsed and any +// non-terminating builtin actions have completed (eg. help, completion, etc.). +func (c *Clause) Action(action Action) *Clause { + c.actions = append(c.actions, action) + return c +} + +// PreAction adds a callback action to be executed after flag values are parsed but before +// any other processing, such as help, completion, etc. +func (c *Clause) PreAction(action Action) *Clause { + c.preActions = append(c.preActions, action) + return c +} + +// HintAction registers a HintAction (function) for the flag to provide completions +func (c *Clause) HintAction(action HintAction) *Clause { + c.addHintAction(action) + return c +} + +// Envar overrides the default value(s) for a flag from an environment variable, +// if it is set. Several default values can be provided by using new lines to +// separate them. +func (c *Clause) Envar(name string) *Clause { + c.envar = name + c.noEnvar = false + return c +} + +// NoEnvar forces environment variable defaults to be disabled for this flag. +// Most useful in conjunction with PrefixedEnvarResolver. 
+func (c *Clause) NoEnvar() *Clause { + c.envar = "" + c.noEnvar = true + return c +} + +func (c *Clause) resolveCompletions() []string { + var hints []string + + options := c.builtinHintActions + if len(c.hintActions) > 0 { + // User specified their own hintActions. Use those instead. + options = c.hintActions + } + + for _, hintAction := range options { + hints = append(hints, hintAction()...) + } + return hints +} + +// HintOptions registers any number of options for the flag to provide completions +func (c *Clause) HintOptions(options ...string) *Clause { + c.addHintAction(func() []string { + return options + }) + return c +} + +// Default values for this flag. They *must* be parseable by the value of the flag. +func (c *Clause) Default(values ...string) *Clause { + c.defaultValues = values + return c +} + +// PlaceHolder sets the place-holder string used for flag values in the help. The +// default behaviour is to use the value provided by Default() if provided, +// then fall back on the capitalized flag name. +func (c *Clause) PlaceHolder(placeholder string) *Clause { + c.placeholder = placeholder + return c +} + +// Hidden hides a flag from usage but still allows it to be used. +func (c *Clause) Hidden() *Clause { + c.hidden = true + return c +} + +// Required makes the flag required. You can not provide a Default() value to a Required() flag. +func (c *Clause) Required() *Clause { + c.required = true + return c +} + +// Short sets the short flag name. +func (c *Clause) Short(name rune) *Clause { + c.shorthand = name + return c +} + +func (c *Clause) needsValue(context *ParseContext) bool { + return c.required && !c.canResolve(context) +} + +func (c *Clause) canResolve(context *ParseContext) bool { + for _, resolver := range context.resolvers { + rvalues, err := resolver.Resolve(c.name, context) + if err != nil { + return false + } + if rvalues != nil { + return true + } + } + return false +} + +func (c *Clause) reset() { + if c, ok := c.value.(cumulativeValue); ok { + c.Reset() + } +} + +func (c *Clause) setDefault(context *ParseContext) error { + var values []string + for _, resolver := range context.resolvers { + rvalues, err := resolver.Resolve(c.name, context) + if err != nil { + return err + } + if rvalues != nil { + values = rvalues + } + } + + if values != nil { + c.reset() + for _, value := range values { + if err := c.value.Set(value); err != nil { + return err + } + } + return nil + } + return nil +} + +func (c *Clause) SetValue(value Value) { + c.value = value +} + +// StringMap provides key=value parsing into a map. +func (c *Clause) StringMap(options ...AccumulatorOption) (target *map[string]string) { + target = &(map[string]string{}) + c.StringMapVar(target, options...) + return +} + +// StringMap provides key=value parsing into a map. +func (c *Clause) StringMapVar(target *map[string]string, options ...AccumulatorOption) { + c.SetValue(newStringMapValue(target, options...)) +} + +// Bytes parses numeric byte units. eg. 1.5KB +func (c *Clause) Bytes() (target *units.Base2Bytes) { + target = new(units.Base2Bytes) + c.BytesVar(target) + return +} + +// ExistingFile sets the parser to one that requires and returns an existing file. +func (c *Clause) ExistingFile() (target *string) { + target = new(string) + c.ExistingFileVar(target) + return +} + +// ExistingDir sets the parser to one that requires and returns an existing directory. 
+func (c *Clause) ExistingDir() (target *string) { + target = new(string) + c.ExistingDirVar(target) + return +} + +// ExistingFileOrDir sets the parser to one that requires and returns an existing file OR directory. +func (c *Clause) ExistingFileOrDir() (target *string) { + target = new(string) + c.ExistingFileOrDirVar(target) + return +} + +// Float sets the parser to a float64 parser. +func (c *Clause) Float() (target *float64) { + return c.Float64() +} + +// Float sets the parser to a float64 parser. +func (c *Clause) FloatVar(target *float64) { + c.Float64Var(target) +} + +// BytesVar parses numeric byte units. eg. 1.5KB +func (c *Clause) BytesVar(target *units.Base2Bytes) { + c.SetValue(newBytesValue(target)) +} + +// ExistingFile sets the parser to one that requires and returns an existing file. +func (c *Clause) ExistingFileVar(target *string) { + c.SetValue(newExistingFileValue(target)) +} + +// ExistingDir sets the parser to one that requires and returns an existing directory. +func (c *Clause) ExistingDirVar(target *string) { + c.SetValue(newExistingDirValue(target)) +} + +// ExistingDir sets the parser to one that requires and returns an existing directory. +func (c *Clause) ExistingFileOrDirVar(target *string) { + c.SetValue(newExistingFileOrDirValue(target)) +} + +// Enum allows a value from a set of options. +func (c *Clause) Enum(options ...string) (target *string) { + target = new(string) + c.EnumVar(target, options...) + return +} + +// EnumVar allows a value from a set of options. +func (c *Clause) EnumVar(target *string, options ...string) { + c.addHintActionBuiltin(func() []string { return options }) + c.SetValue(newEnumFlag(target, options...)) +} + +// Enums allows a set of values from a set of options. +func (c *Clause) Enums(options ...string) (target *[]string) { + target = new([]string) + c.EnumsVar(target, options...) + return +} + +// EnumsVar allows a value from a set of options. +func (c *Clause) EnumsVar(target *[]string, options ...string) { + c.SetValue(newEnumsFlag(target, options...)) +} + +// Counter increments a number each time it is encountered. +func (c *Clause) Counter() (target *int) { + target = new(int) + c.CounterVar(target) + return +} + +func (c *Clause) CounterVar(target *int) { + c.SetValue(newCounterValue(target)) +} + +// IP provides a validated parsed *net.IP value. +func (c *Clause) IP() (target *net.IP) { + target = new(net.IP) + c.IPVar(target) + return +} + +// IPVar provides a validated parsed *net.IP value. +func (c *Clause) IPVar(target *net.IP) { + c.SetValue(newIPValue(target)) +} diff --git a/vendor/github.com/alecthomas/kingpin/cmd.go b/vendor/github.com/alecthomas/kingpin/cmd.go new file mode 100644 index 0000000000..23edd5ffc2 --- /dev/null +++ b/vendor/github.com/alecthomas/kingpin/cmd.go @@ -0,0 +1,317 @@ +package kingpin + +import ( + "errors" + "strings" +) + +type cmdMixin struct { + actionMixin + *flagGroup + *argGroup + *cmdGroup +} + +// CmdCompletion returns completion options for arguments, if that's where +// parsing left off, or commands if there aren't any unsatisfied args. +func (c *cmdMixin) CmdCompletion(context *ParseContext) []string { + var options []string + + // Count args already satisfied - we won't complete those, and add any + // default commands' alternatives, since they weren't listed explicitly + // and the user may want to explicitly list something else. 
+ argsSatisfied := 0 + for _, el := range context.Elements { + switch { + case el.OneOf.Arg != nil: + if el.Value != nil && *el.Value != "" { + argsSatisfied++ + } + case el.OneOf.Cmd != nil: + options = append(options, el.OneOf.Cmd.completionAlts...) + default: + } + } + + if argsSatisfied < len(c.argGroup.args) { + // Since not all args have been satisfied, show options for the current one + options = append(options, c.argGroup.args[argsSatisfied].resolveCompletions()...) + } else { + // If all args are satisfied, then go back to completing commands + for _, cmd := range c.cmdGroup.commandOrder { + if !cmd.hidden { + options = append(options, cmd.name) + } + } + } + + return options +} + +func (c *cmdMixin) FlagCompletion(flagName string, flagValue string) (choices []string, flagMatch bool, optionMatch bool) { + // Check if flagName matches a known flag. + // If it does, show the options for the flag + // Otherwise, show all flags + + options := []string{} + + for _, flag := range c.flagGroup.flagOrder { + // Loop through each flag and determine if a match exists + if flag.name == flagName { + // User typed entire flag. Need to look for flag options. + options = flag.resolveCompletions() + if len(options) == 0 { + // No Options to Choose From, Assume Match. + return options, true, true + } + + // Loop options to find if the user specified value matches + isPrefix := false + matched := false + + for _, opt := range options { + if flagValue == opt { + matched = true + } else if strings.HasPrefix(opt, flagValue) { + isPrefix = true + } + } + + // Matched Flag Directly + // Flag Value Not Prefixed, and Matched Directly + return options, true, !isPrefix && matched + } + + if !flag.hidden { + options = append(options, "--"+flag.name) + } + } + // No Flag directly matched. + return options, false, false + +} + +type cmdGroup struct { + app *Application + parent *CmdClause + commands map[string]*CmdClause + commandOrder []*CmdClause +} + +func (c *cmdGroup) defaultSubcommand() *CmdClause { + for _, cmd := range c.commandOrder { + if cmd.isDefault { + return cmd + } + } + return nil +} + +func (c *cmdGroup) cmdNames() []string { + names := make([]string, 0, len(c.commandOrder)) + for _, cmd := range c.commandOrder { + names = append(names, cmd.name) + } + return names +} + +// GetArg gets a command definition. +// +// This allows existing commands to be modified after definition but before parsing. Useful for +// modular applications. 
+func (c *cmdGroup) GetCommand(name string) *CmdClause { + return c.commands[name] +} + +func newCmdGroup(app *Application) *cmdGroup { + return &cmdGroup{ + app: app, + commands: make(map[string]*CmdClause), + } +} + +func (c *cmdGroup) addCommand(name, help string) *CmdClause { + cmd := newCommand(c.app, name, help) + c.commands[name] = cmd + c.commandOrder = append(c.commandOrder, cmd) + return cmd +} + +func (c *cmdGroup) init() error { + seen := map[string]bool{} + if c.defaultSubcommand() != nil && !c.have() { + return TError("default subcommand {{.Arg0}} provided but no subcommands defined", V{"Arg0": c.defaultSubcommand().name}) + } + defaults := []string{} + for _, cmd := range c.commandOrder { + if cmd.isDefault { + defaults = append(defaults, cmd.name) + } + if seen[cmd.name] { + return TError("duplicate command {{.Arg0}}", V{"Arg0": cmd.name}) + } + seen[cmd.name] = true + for _, alias := range cmd.aliases { + if seen[alias] { + return TError("alias duplicates existing command {{.Arg0}}", V{"Arg0": alias}) + } + c.commands[alias] = cmd + } + if err := cmd.init(); err != nil { + return err + } + } + if len(defaults) > 1 { + return TError("more than one default subcommand exists: {{.Arg0}}", V{"Arg0": strings.Join(defaults, ", ")}) + } + return nil +} + +func (c *cmdGroup) have() bool { + return len(c.commands) > 0 +} + +type CmdClauseValidator func(*CmdClause) error + +// A CmdClause is a single top-level command. It encapsulates a set of flags +// and either subcommands or positional arguments. +type CmdClause struct { + cmdMixin + app *Application + name string + aliases []string + help string + isDefault bool + validator CmdClauseValidator + hidden bool + completionAlts []string + optionalSubcommands bool +} + +func newCommand(app *Application, name, help string) *CmdClause { + c := &CmdClause{ + app: app, + name: name, + help: help, + } + c.flagGroup = newFlagGroup() + c.argGroup = newArgGroup() + c.cmdGroup = newCmdGroup(app) + return c +} + +// Struct allows applications to define flags with struct tags. +// +// Supported struct tags are: help, placeholder, default, short, long, required, hidden, env, +// enum, and arg. +// +// The name of the flag will default to the CamelCase name transformed to camel-case. This can +// be overridden with the "long" tag. +// +// All basic Go types are supported including floats, ints, strings, time.Duration, +// and slices of same. +// +// For compatibility, also supports the tags used by https://github.com/jessevdk/go-flags +func (c *CmdClause) Struct(v interface{}) error { + return c.fromStruct(c, v) +} + +// Add an Alias for this command. +func (c *CmdClause) Alias(name string) *CmdClause { + c.aliases = append(c.aliases, name) + return c +} + +// Validate sets a validation function to run when parsing. +func (c *CmdClause) Validate(validator CmdClauseValidator) *CmdClause { + c.validator = validator + return c +} + +// FullCommand returns the fully qualified "path" to this command, +// including interspersed argument placeholders. Does not include trailing +// argument placeholders. +// +// eg. "signup " +func (c *CmdClause) FullCommand() string { + return strings.Join(c.fullCommand(), " ") +} + +func (c *CmdClause) fullCommand() (out []string) { + out = append(out, c.name) + for _, arg := range c.args { + text := "<" + arg.name + ">" + if _, ok := arg.value.(cumulativeValue); ok { + text += " ..." 
+ } + if !arg.required { + text = "[" + text + "]" + } + out = append(out, text) + } + if c.parent != nil { + out = append(c.parent.fullCommand(), out...) + } + return +} + +// Command adds a new sub-command. +func (c *CmdClause) Command(name, help string) *CmdClause { + cmd := c.addCommand(name, help) + cmd.parent = c + return cmd +} + +// OptionalSubcommands makes subcommands optional +func (c *CmdClause) OptionalSubcommands() *CmdClause { + c.optionalSubcommands = true + return c +} + +// Default makes this command the default if commands don't match. +func (c *CmdClause) Default() *CmdClause { + c.isDefault = true + return c +} + +func (c *CmdClause) Action(action Action) *CmdClause { + c.addAction(action) + return c +} + +func (c *CmdClause) PreAction(action Action) *CmdClause { + c.addPreAction(action) + return c +} + +func (c *cmdMixin) checkArgCommandMixing() error { + if c.argGroup.have() && c.cmdGroup.have() { + for _, arg := range c.args { + if arg.consumesRemainder() { + return errors.New("cannot mix cumulative Arg() with Command()s") + } + if !arg.required { + return errors.New("Arg()s mixed with Command()s MUST be required") + } + } + } + return nil +} + +func (c *CmdClause) init() error { + if err := c.flagGroup.init(); err != nil { + return err + } + if err := c.checkArgCommandMixing(); err != nil { + return err + } + if err := c.argGroup.init(); err != nil { + return err + } + return c.cmdGroup.init() +} + +func (c *CmdClause) Hidden() *CmdClause { + c.hidden = true + return c +} diff --git a/vendor/github.com/alecthomas/kingpin/completions.go b/vendor/github.com/alecthomas/kingpin/completions.go new file mode 100644 index 0000000000..655758ec89 --- /dev/null +++ b/vendor/github.com/alecthomas/kingpin/completions.go @@ -0,0 +1,34 @@ +package kingpin + +// HintAction is a function type who is expected to return a slice of possible +// command line arguments. +type HintAction func() []string + +type completionsMixin struct { + hintActions []HintAction + builtinHintActions []HintAction +} + +func (a *completionsMixin) addHintAction(action HintAction) { + a.hintActions = append(a.hintActions, action) +} + +// Allow adding of HintActions which are added internally, ie, EnumVar +func (a *completionsMixin) addHintActionBuiltin(action HintAction) { + a.builtinHintActions = append(a.builtinHintActions, action) +} + +func (a *completionsMixin) resolveCompletions() []string { + var hints []string + + options := a.builtinHintActions + if len(a.hintActions) > 0 { + // User specified their own hintActions. Use those instead. + options = a.hintActions + } + + for _, hintAction := range options { + hints = append(hints, hintAction()...) + } + return hints +} diff --git a/vendor/github.com/alecthomas/kingpin/doc.go b/vendor/github.com/alecthomas/kingpin/doc.go new file mode 100644 index 0000000000..c14762c5c4 --- /dev/null +++ b/vendor/github.com/alecthomas/kingpin/doc.go @@ -0,0 +1,68 @@ +// Package kingpin provides command line interfaces like this: +// +// $ chat +// usage: chat [] [] [ ...] +// +// Flags: +// --debug enable debug mode +// --help Show help. +// --server=127.0.0.1 server address +// +// Commands: +// help +// Show help for a command. +// +// post [] +// Post a message to a channel. +// +// register +// Register a new user. +// +// $ chat help post +// usage: chat [] post [] [] +// +// Post a message to a channel. 
+// +// Flags: +// --image=IMAGE image to post +// +// Args: +// channel to post to +// [] text to post +// $ chat post --image=~/Downloads/owls.jpg pics +// +// From code like this: +// +// package main +// +// import "gopkg.in/alecthomas/kingpin.v1" +// +// var ( +// debug = kingpin.Flag("debug", "enable debug mode").Default("false").Bool() +// serverIP = kingpin.Flag("server", "server address").Default("127.0.0.1").IP() +// +// register = kingpin.Command("register", "Register a new user.") +// registerNick = register.Arg("nick", "nickname for user").Required().String() +// registerName = register.Arg("name", "name of user").Required().String() +// +// post = kingpin.Command("post", "Post a message to a channel.") +// postImage = post.Flag("image", "image to post").ExistingFile() +// postChannel = post.Arg("channel", "channel to post to").Required().String() +// postText = post.Arg("text", "text to post").String() +// ) +// +// func main() { +// switch kingpin.Parse() { +// // Register user +// case "register": +// println(*registerNick) +// +// // Post message +// case "post": +// if *postImage != nil { +// } +// if *postText != "" { +// } +// } +// } +package kingpin diff --git a/vendor/github.com/alecthomas/kingpin/flags.go b/vendor/github.com/alecthomas/kingpin/flags.go new file mode 100644 index 0000000000..04e98cc129 --- /dev/null +++ b/vendor/github.com/alecthomas/kingpin/flags.go @@ -0,0 +1,141 @@ +package kingpin + +import "strings" + +type flagGroup struct { + short map[string]*Clause + long map[string]*Clause + flagOrder []*Clause +} + +func newFlagGroup() *flagGroup { + return &flagGroup{ + short: map[string]*Clause{}, + long: map[string]*Clause{}, + } +} + +// GetFlag gets a flag definition. +// +// This allows existing flags to be modified after definition but before parsing. Useful for +// modular applications. +func (f *flagGroup) GetFlag(name string) *Clause { + return f.long[name] +} + +// Flag defines a new flag with the given long name and help. +func (f *flagGroup) Flag(name, help string) *Clause { + flag := NewClause(name, help) + f.long[name] = flag + f.flagOrder = append(f.flagOrder, flag) + return flag +} + +func (f *flagGroup) init() error { + if err := f.checkDuplicates(); err != nil { + return err + } + for _, flag := range f.long { + if err := flag.init(); err != nil { + return err + } + if flag.shorthand != 0 { + f.short[string(flag.shorthand)] = flag + } + } + return nil +} + +func (f *flagGroup) checkDuplicates() error { + seenShort := map[rune]bool{} + seenLong := map[string]bool{} + for _, flag := range f.flagOrder { + if flag.shorthand != 0 { + if _, ok := seenShort[flag.shorthand]; ok { + return TError("duplicate short flag -{{.Arg0}}", V{"Arg0": flag.shorthand}) + } + seenShort[flag.shorthand] = true + } + if _, ok := seenLong[flag.name]; ok { + return TError("duplicate long flag --{{.Arg0}}", V{"Arg0": flag.name}) + } + seenLong[flag.name] = true + } + return nil +} + +func (f *flagGroup) parse(context *ParseContext) (*Clause, error) { + var token *Token + +loop: + for { + token = context.Peek() + switch token.Type { + case TokenEOL: + break loop + + case TokenLong, TokenShort: + flagToken := token + var flag *Clause + var ok bool + invert := false + + name := token.Value + if token.Type == TokenLong { + flag, ok = f.long[name] + if !ok { + if strings.HasPrefix(name, "no-") { + name = name[3:] + invert = true + flag, ok = f.long[name] + // Found an inverted flag. Check if the flag supports it. 
+ if ok { + bf, bok := flag.value.(BoolFlag) + ok = bok && bf.BoolFlagIsNegatable() + } + } + } else if strings.HasPrefix(name, "no-") { + invert = true + } + if !ok { + return nil, TError("unknown long flag '{{.Arg0}}'", V{"Arg0": flagToken}) + } + } else { + flag, ok = f.short[name] + if !ok { + return nil, TError("unknown short flag '{{.Arg0}}'", V{"Arg0": flagToken}) + } + } + + context.Next() + + var defaultValue string + if isBoolFlag(flag.value) { + if invert { + defaultValue = "false" + } else { + defaultValue = "true" + } + } else { + if invert { + context.Push(token) + return nil, TError("unknown long flag '{{.Arg0}}'", V{"Arg0": flagToken}) + } + token = context.Peek() + if token.Type != TokenArg { + context.Push(token) + return nil, TError("expected argument for flag '{{.Arg0}}'", V{"Arg0": flagToken}) + } + context.Next() + defaultValue = token.Value + } + + context.matchedFlag(flag, defaultValue) + return flag, nil + + default: + break loop + } + } + return nil, nil +} diff --git a/vendor/github.com/alecthomas/kingpin/global.go b/vendor/github.com/alecthomas/kingpin/global.go new file mode 100644 index 0000000000..97f1e2af52 --- /dev/null +++ b/vendor/github.com/alecthomas/kingpin/global.go @@ -0,0 +1,96 @@ +package kingpin + +import ( + "os" + "path/filepath" +) + +var ( + // CommandLine is the default Kingpin parser. + CommandLine = New(filepath.Base(os.Args[0]), "") +) + +// Command adds a new command to the default parser. +func Command(name, help string) *CmdClause { + return CommandLine.Command(name, help) +} + +// Flag adds a new flag to the default parser. +func Flag(name, help string) *Clause { + return CommandLine.Flag(name, help) +} + +// Arg adds a new argument to the top-level of the default parser. +func Arg(name, help string) *Clause { + return CommandLine.Arg(name, help) +} + +// Struct creates a command-line from a struct. +func Struct(v interface{}) *Application { + err := CommandLine.Struct(v) + FatalIfError(err, "") + return CommandLine +} + +// Parse and return the selected command. Will call the termination handler if +// an error is encountered. +func Parse() string { + selected := MustParse(CommandLine.Parse(os.Args[1:])) + if selected == "" && CommandLine.cmdGroup.have() { + Usage() + CommandLine.terminate(0) + } + return selected +} + +// Errorf prints an error message to stderr. +func Errorf(format string, args ...interface{}) { + CommandLine.Errorf(format, args...) +} + +// Fatalf prints an error message to stderr and exits. +func Fatalf(format string, args ...interface{}) { + CommandLine.Fatalf(format, args...) +} + +// FatalIfError prints an error and exits if err is not nil. The error is printed +// with the given prefix. +func FatalIfError(err error, format string, args ...interface{}) { + CommandLine.FatalIfError(err, format, args...) +} + +// FatalUsage prints an error message followed by usage information, then +// exits with a non-zero status. +func FatalUsage(format string, args ...interface{}) { + CommandLine.FatalUsage(format, args...) +} + +// FatalUsageContext writes a printf formatted error message to stderr, then +// usage information for the given ParseContext, before exiting. +func FatalUsageContext(context *ParseContext, format string, args ...interface{}) { + CommandLine.FatalUsageContext(context, format, args...) +} + +// Usage prints usage to stderr. +func Usage() { + CommandLine.Usage(os.Args[1:]) +} + +// UsageTemplate associates a template with a flag. The flag must be a Bool() and must +// already be defined. 
+func UsageTemplate(template string) *Application { + return CommandLine.UsageTemplate(template) +} + +// MustParse can be used with app.Parse(args) to exit with an error if parsing fails. +func MustParse(command string, err error) string { + if err != nil { + Fatalf(T("{{.Arg0}}, try --help", V{"Arg0": err})) + } + return command +} + +// Version adds a flag for displaying the application version number. +func Version(version string) *Application { + return CommandLine.Version(version) +} diff --git a/vendor/github.com/alecthomas/kingpin/guesswidth.go b/vendor/github.com/alecthomas/kingpin/guesswidth.go new file mode 100644 index 0000000000..a269531c86 --- /dev/null +++ b/vendor/github.com/alecthomas/kingpin/guesswidth.go @@ -0,0 +1,9 @@ +// +build appengine !linux,!freebsd,!darwin,!dragonfly,!netbsd,!openbsd + +package kingpin + +import "io" + +func guessWidth(w io.Writer) int { + return 80 +} diff --git a/vendor/github.com/alecthomas/kingpin/guesswidth_unix.go b/vendor/github.com/alecthomas/kingpin/guesswidth_unix.go new file mode 100644 index 0000000000..ad8163f555 --- /dev/null +++ b/vendor/github.com/alecthomas/kingpin/guesswidth_unix.go @@ -0,0 +1,38 @@ +// +build !appengine,linux freebsd darwin dragonfly netbsd openbsd + +package kingpin + +import ( + "io" + "os" + "strconv" + "syscall" + "unsafe" +) + +func guessWidth(w io.Writer) int { + // check if COLUMNS env is set to comply with + // http://pubs.opengroup.org/onlinepubs/009604499/basedefs/xbd_chap08.html + colsStr := os.Getenv("COLUMNS") + if colsStr != "" { + if cols, err := strconv.Atoi(colsStr); err == nil { + return cols + } + } + + if t, ok := w.(*os.File); ok { + fd := t.Fd() + var dimensions [4]uint16 + + if _, _, err := syscall.Syscall6( + syscall.SYS_IOCTL, + uintptr(fd), + uintptr(syscall.TIOCGWINSZ), + uintptr(unsafe.Pointer(&dimensions)), + 0, 0, 0, + ); err == 0 { + return int(dimensions[1]) + } + } + return 80 +} diff --git a/vendor/github.com/alecthomas/kingpin/i18n_en_AU.go b/vendor/github.com/alecthomas/kingpin/i18n_en_AU.go new file mode 100644 index 0000000000..bd8e81a27f --- /dev/null +++ b/vendor/github.com/alecthomas/kingpin/i18n_en_AU.go @@ -0,0 +1,3 @@ +package kingpin + +var i18n_en_AU = 
[]byte("\x1f\x8b\b\x00\x00\x00\x00\x00\x02\xff\xa4XOk+7\x10\xbf\xfbS\f\xb9؆8\xa4\xf4f\xe8!\x94\xb4\x85\x06\x1e4\xbcB\xfb\xfa\x0e\xf2\xee쮨,m%\xad\x9d\x10\xf2\u074bv\xfdg\x93\xccHZ\xe7f\xd0\xfc\xfex\xa4\x19\x8d\xf6\xdb\f\xe0e\x06\x00p%˫5\\\xcd_^n\xeel}\xfb\xfa:\a\xe9@@)-\x16\xde\xd8\xe7\xab\xeb!\xce[\xa1\x9d\x12^\x1a\x9d\x00\xcc\x00^\xaf\x93\x02\x95T\x98\xcb\xddǒ\xb4k@k\x8d]\x03Cu^'\xe1\xfft\xb7\xb7?\x16\xf7_\x1e\xfa\x1f\x9c\x9f\xf7Q\x11*-U\x06\xd59\x8a\xa4\xba\xb3\xb5[,\xa1\x10z\xeea\x83P\x19\xa5\xcc\x1eK\xd8<\x83\xd0\xc67hAغۢ\xf60J\x16\xa3y1\x1di\xeeW\xd4h\x85G\x10\xb0\x15\x1aZQ\xe3\r#L\x86\xc6I\v\xb3m\x15\x06\x02p\x85\x95\xad\x87\xcaX\xf8\xfb\xf1\xb7\xa4F\x04y\x89\xe4F\xb8\xe6B\xcd\x01\x1a\x17UF\xd7Рj\x93\x1a\xa3H\x92\xf2K\xe7\xdb\xceCk\x9c\x93\x1b5\xf6\xe4z7\xbeA\xa8\xe5\x0eu\xd8c\xc7\xc9Me!\xad<6f\x0f\xa2m\x95,zbءu\xd2hN\x94\x8f\xe7\xe9\v\xa3=>\xf9\x95C\xed\xa4\x97;\x8cf1\n\xe1E\xc2:\x18\x1d\xb2\xb0\x15\xba\x8c\xb2\u007f\x88\x8d\xd3&\xb9\x18\x82oC稔\xa8\xdd\xd0;\xbe3LT$I)\x94\x14\x0e\xcan\xc8?:\xc0'\xe9\xbc\xd4\xf5\xf1\xbf\xc0\xa9\x170RS\x18h\v\x87\xb6\xc3\xf1\x1f\x97I\xf0\xd0ж\xf2\t\xeel\xbdX:\xd8K\xdf\xc0σ\xf2b\xe9\x18\xd2$,!\xe6M\xbbR\xb8Cu\xa9l\x92\x806pȨ6\x1e\\\x8b\x85\xac$\x96\x9c\x16\x19KҖX\x89Nyp\xdd\xe6ÞAk\xcdN\x96\xe1\xa6\xe8G\x9ea||̦l\\\x1cL\n\x9f\xe7!\x86|\x14@\x12t\xfa㕛\xae\xef\x14*%5I!\x83\xf8_m\xf6\x9ae\x1aVc\xd0\xd1\b\x9bc-\x86\x89ʌ\xc6\xd7|\x1d\x06D\n\x8d\xe6Qo\x9fa\xb5\n\xefx\x86\x9f\x8e\xa5\xfd;Q\xe3\x9a\xf39,\x92\xc0_\xc2ӟ\x03\x1e\x16\xd9\x0f\x8f\xeb\xc8WD\x06vx\xba\xb2\xd0\xf3:\xfdm\xe4\xfc\xfc\xe3\x18ބ\x04\x92\xd9\xf7\xd9\xff\x01\x00\x00\xff\xffrWY\x98\xc4\x16\x00\x00") diff --git a/vendor/github.com/alecthomas/kingpin/i18n_fr.go b/vendor/github.com/alecthomas/kingpin/i18n_fr.go new file mode 100644 index 0000000000..1f27521adc --- /dev/null +++ b/vendor/github.com/alecthomas/kingpin/i18n_fr.go @@ -0,0 +1,3 @@ +package kingpin + +var i18n_fr = []byte("\x1f\x8b\b\x00\x00\x00\x00\x00\x02\xff\x94\xd1A\xaa\x830\x10\x06\xe0}N1d\xf36\xea\x01\xdc\xc9{\xbc\v\xb8,]L㨁\x98H&i\x85һ\x17\xd1nJ\a\xe9\xfa\xcf\xf7\a\xe6?)\x80\xbb\x02\x00ж\xd35\xe8v\f70\xc1'ZR\xc9\xe4\xd9&{%\x18\xc9͕.\xb6\x97)\xa2g\x87\xc9\x06\xbf\x92\xa6\xef\xad\x19)\x82\xfbA\xdb\xd1Kgr\x8e*\xad\x00\x1e\xc5\xfb/\x99q\xa0Z\xe8\xdbÏ\xf0\xdf\xe1\xc0\x12\xfc\x8b8\x13\xe6E\xb0M\x94i\x13\x87<\x91O,\xd8\xdf0M\xe8;\xd1\xef9I\xbe\xcd\x17sPц̥9\xeaY\xe7\xf9b\x8c\xed\xfe\uab1e\x01\x00\x00\xff\xff\x1esa\xbf\xe9\x01\x00\x00") diff --git a/vendor/github.com/alecthomas/kingpin/i18n_init.go b/vendor/github.com/alecthomas/kingpin/i18n_init.go new file mode 100644 index 0000000000..61b62b3c05 --- /dev/null +++ b/vendor/github.com/alecthomas/kingpin/i18n_init.go @@ -0,0 +1,82 @@ +package kingpin + +//go:generate go run ./cmd/embedi18n/main.go en-AU +//go:generate go run ./cmd/embedi18n/main.go fr + +import ( + "bytes" + "compress/gzip" + "io/ioutil" + "os" + "strings" + + "github.com/nicksnyder/go-i18n/i18n" +) + +type tError struct { + msg string + args []interface{} +} + +// TError is an error that translates itself. +// +// It has the same signature and usage as T(). +func TError(msg string, args ...interface{}) error { return &tError{msg: msg, args: args} } +func (i *tError) Error() string { return T(i.msg, i.args...) } + +// T is a translation function. +var T = initI18N() + +func initI18N() i18n.TranslateFunc { + // Initialise translations. 
+ i18n.ParseTranslationFileBytes("i18n/en-AU.all.json", decompressLang(i18n_en_AU)) + i18n.ParseTranslationFileBytes("i18n/fr.all.json", decompressLang(i18n_fr)) + + // Detect language. + lang := detectLang() + t, err := i18n.Tfunc(lang, "en") + if err != nil { + panic(err) + } + return t +} + +func detectLang() string { + lang := os.Getenv("LANG") + if lang == "" { + return "en" + } + // Remove encoding spec (eg. ".UTF-8") + if idx := strings.Index(lang, "."); idx != -1 { + lang = lang[0:idx] + } + // en_AU -> en-AU + return strings.Replace(lang, "_", "-", -1) +} + +func decompressLang(data []byte) []byte { + r := bytes.NewReader(data) + gr, err := gzip.NewReader(r) + if err != nil { + panic(err) + } + out, err := ioutil.ReadAll(gr) + if err != nil { + panic(err) + } + return out +} + +// SetLanguage sets the language for Kingpin. +func SetLanguage(lang string, others ...string) error { + t, err := i18n.Tfunc(lang, others...) + if err != nil { + return err + } + T = t + return nil +} + +// V is a convenience alias for translation function variables. +// eg. T("Something {{.Arg0}}", V{"Arg0": "moo"}) +type V map[string]interface{} diff --git a/vendor/github.com/alecthomas/kingpin/model.go b/vendor/github.com/alecthomas/kingpin/model.go new file mode 100644 index 0000000000..d5c1d1f8df --- /dev/null +++ b/vendor/github.com/alecthomas/kingpin/model.go @@ -0,0 +1,298 @@ +// nolint: golint +package kingpin + +import ( + "fmt" + "strconv" + "strings" +) + +// Data model for Kingpin command-line structure. + +type FlagGroupModel struct { + Flags []*ClauseModel +} + +func (f *FlagGroupModel) FlagByName(name string) *ClauseModel { + for _, flag := range f.Flags { + if flag.Name == name { + return flag + } + } + return nil +} + +func (f *FlagGroupModel) FlagSummary() string { + out := []string{} + count := 0 + for _, flag := range f.Flags { + if flag.Name != "help" { + count++ + } + if flag.Required { + if flag.IsBoolFlag() { + if flag.IsNegatable() { + out = append(out, fmt.Sprintf("--[no-]%s", flag.Name)) + } else { + out = append(out, fmt.Sprintf("--%s", flag.Name)) + } + } else { + out = append(out, fmt.Sprintf("--%s=%s", flag.Name, flag.FormatPlaceHolder())) + } + } + } + if count != len(out) { + out = append(out, T("[]")) + } + return strings.Join(out, " ") +} + +type ClauseModel struct { + Name string + Help string + Short rune + Default []string + PlaceHolder string + Required bool + Hidden bool + Value Value + Cumulative bool +} + +func (c *ClauseModel) String() string { + return c.Value.String() +} + +func (c *ClauseModel) IsBoolFlag() bool { + return isBoolFlag(c.Value) +} + +func (c *ClauseModel) IsNegatable() bool { + bf, ok := c.Value.(BoolFlag) + return ok && bf.BoolFlagIsNegatable() +} + +func (c *ClauseModel) FormatPlaceHolder() string { + if c.PlaceHolder != "" { + return c.PlaceHolder + } + if len(c.Default) > 0 { + ellipsis := "" + if len(c.Default) > 1 { + ellipsis = "..." + } + if _, ok := c.Value.(*stringValue); ok { + return strconv.Quote(c.Default[0]) + ellipsis + } + return c.Default[0] + ellipsis + } + return strings.ToUpper(c.Name) +} + +type ArgGroupModel struct { + Args []*ClauseModel +} + +func (a *ArgGroupModel) ArgSummary() string { + depth := 0 + out := []string{} + for _, arg := range a.Args { + h := "<" + arg.Name + ">" + if arg.Cumulative { + h += " ..." 
+ } + if !arg.Required { + h = "[" + h + depth++ + } + out = append(out, h) + } + if len(out) == 0 { + return "" + } + out[len(out)-1] = out[len(out)-1] + strings.Repeat("]", depth) + return strings.Join(out, " ") +} + +type CmdGroupModel struct { + Commands []*CmdModel +} + +func (c *CmdGroupModel) FlattenedCommands() (out []*CmdModel) { + for _, cmd := range c.Commands { + if cmd.OptionalSubcommands { + out = append(out, cmd) + } + if len(cmd.Commands) == 0 { + out = append(out, cmd) + } + out = append(out, cmd.FlattenedCommands()...) + } + return +} + +type CmdModel struct { + Name string + Aliases []string + Help string + Depth int + Hidden bool + Default bool + OptionalSubcommands bool + Parent *CmdModel + *FlagGroupModel + *ArgGroupModel + *CmdGroupModel +} + +func (c *CmdModel) String() string { + return c.CmdSummary() +} + +func (c *CmdModel) CmdSummary() string { + out := []string{} + for cursor := c; cursor != nil; cursor = cursor.Parent { + text := cursor.Name + if cursor.Default { + text = "*" + text + } + if flags := cursor.FlagSummary(); flags != "" { + text += " " + flags + } + if args := cursor.ArgSummary(); args != "" { + text += " " + args + } + out = append([]string{text}, out...) + } + return strings.Join(out, " ") +} + +// FullCommand is the command path to this node, excluding positional arguments and flags. +func (c *CmdModel) FullCommand() string { + out := []string{} + for i := c; i != nil; i = i.Parent { + out = append([]string{i.Name}, out...) + } + return strings.Join(out, " ") +} + +type ApplicationModel struct { + Name string + Help string + Version string + Author string + *ArgGroupModel + *CmdGroupModel + *FlagGroupModel +} + +func (a *ApplicationModel) AppSummary() string { + summary := a.Name + if flags := a.FlagSummary(); flags != "" { + summary += " " + flags + } + if args := a.ArgSummary(); args != "" { + summary += " " + args + } + if len(a.Commands) > 0 { + summary += " " + } + return summary +} + +func (a *ApplicationModel) FindModelForCommand(cmd *CmdClause) *CmdModel { + if cmd == nil { + return nil + } + path := []string{} + for c := cmd; c != nil; c = c.parent { + path = append([]string{c.name}, path...) 
+ } + var selected *CmdModel + cursor := a.CmdGroupModel + for _, component := range path { + for _, cmd := range cursor.Commands { + if cmd.Name == component { + selected = cmd + cursor = cmd.CmdGroupModel + break + } + } + } + if selected == nil { + panic("this shouldn't happen") + } + return selected +} + +func (a *Application) Model() *ApplicationModel { + return &ApplicationModel{ + Name: a.Name, + Help: a.Help, + Version: a.version, + Author: a.author, + FlagGroupModel: a.flagGroup.Model(), + ArgGroupModel: a.argGroup.Model(), + CmdGroupModel: a.cmdGroup.Model(nil), + } +} + +func (a *argGroup) Model() *ArgGroupModel { + m := &ArgGroupModel{} + for _, arg := range a.args { + m.Args = append(m.Args, arg.Model()) + } + return m +} + +func (f *flagGroup) Model() *FlagGroupModel { + m := &FlagGroupModel{} + for _, fl := range f.flagOrder { + m.Flags = append(m.Flags, fl.Model()) + } + return m +} + +func (f *Clause) Model() *ClauseModel { + _, cumulative := f.value.(cumulativeValue) + return &ClauseModel{ + Name: f.name, + Help: f.help, + Short: f.shorthand, + Default: f.defaultValues, + PlaceHolder: f.placeholder, + Required: f.required, + Hidden: f.hidden, + Value: f.value, + Cumulative: cumulative, + } +} + +func (c *cmdGroup) Model(parent *CmdModel) *CmdGroupModel { + m := &CmdGroupModel{} + for _, cm := range c.commandOrder { + m.Commands = append(m.Commands, cm.Model(parent)) + } + return m +} + +func (c *CmdClause) Model(parent *CmdModel) *CmdModel { + depth := 0 + for i := c; i != nil; i = i.parent { + depth++ + } + cmd := &CmdModel{ + Name: c.name, + Parent: parent, + Aliases: c.aliases, + Help: c.help, + Depth: depth, + Hidden: c.hidden, + Default: c.isDefault, + OptionalSubcommands: c.optionalSubcommands, + FlagGroupModel: c.flagGroup.Model(), + ArgGroupModel: c.argGroup.Model(), + } + cmd.CmdGroupModel = c.cmdGroup.Model(cmd) + return cmd +} diff --git a/vendor/github.com/alecthomas/kingpin/parser.go b/vendor/github.com/alecthomas/kingpin/parser.go new file mode 100644 index 0000000000..5a77d87eeb --- /dev/null +++ b/vendor/github.com/alecthomas/kingpin/parser.go @@ -0,0 +1,437 @@ +package kingpin + +import ( + "bufio" + "os" + "strings" + "unicode/utf8" +) + +type TokenType int + +// Token types. +const ( + TokenShort TokenType = iota + TokenLong + TokenArg + TokenError + TokenEOL +) + +func (t TokenType) String() string { + switch t { + case TokenShort: + return T("short flag") + case TokenLong: + return T("long flag") + case TokenArg: + return T("argument") + case TokenError: + return T("error") + case TokenEOL: + return T("") + } + return T("unknown") +} + +var ( + TokenEOLMarker = Token{-1, TokenEOL, ""} +) + +type Token struct { + Index int + Type TokenType + Value string +} + +func (t *Token) Equal(o *Token) bool { + return t.Index == o.Index +} + +func (t *Token) IsFlag() bool { + return t.Type == TokenShort || t.Type == TokenLong +} + +func (t *Token) IsEOF() bool { + return t.Type == TokenEOL +} + +func (t *Token) String() string { + switch t.Type { + case TokenShort: + return "-" + t.Value + case TokenLong: + return "--" + t.Value + case TokenArg: + return t.Value + case TokenError: + return T("error: ") + t.Value + case TokenEOL: + return T("") + default: + panic("unhandled type") + } +} + +type OneOfClause struct { + Flag *Clause + Arg *Clause + Cmd *CmdClause +} + +// A ParseElement represents the parsers view of each element in the command-line argument slice. +type ParseElement struct { + // Clause associated with this element. 
Exactly one of these will be present. + OneOf OneOfClause + // Value is corresponding value for an argument or flag. For commands this value will be nil. + Value *string +} + +// ParseElements represents each element in the command-line argument slice. +type ParseElements []*ParseElement + +// FlagMap collects all parsed flags into a map keyed by long name. +func (p ParseElements) FlagMap() map[string]*ParseElement { + // Collect flags into maps. + flags := map[string]*ParseElement{} + for _, element := range p { + if element.OneOf.Flag != nil { + flags[element.OneOf.Flag.name] = element + } + } + return flags +} + +// ArgMap collects all parsed positional arguments into a map keyed by long name. +func (p ParseElements) ArgMap() map[string]*ParseElement { + flags := map[string]*ParseElement{} + for _, element := range p { + if element.OneOf.Arg != nil { + flags[element.OneOf.Arg.name] = element + } + } + return flags +} + +// ParseContext holds the current context of the parser. When passed to +// Action() callbacks Elements will be fully populated with *FlagClause, +// *ArgClause and *CmdClause values and their corresponding arguments (if +// any). +type ParseContext struct { + Application *Application // May be nil in tests. + SelectedCommand *CmdClause + resolvers []Resolver + ignoreDefault bool + argsOnly bool + peek []*Token + argi int // Index of current command-line arg we're processing. + args []string + rawArgs []string + flags *flagGroup + arguments *argGroup + argumenti int // Cursor into arguments + // Flags, arguments and commands encountered and collected during parse. + Elements ParseElements +} + +func (p *ParseContext) CombinedFlagsAndArgs() []*Clause { + return append(p.Args(), p.Flags()...) +} + +func (p *ParseContext) Args() []*Clause { + return p.arguments.args +} + +func (p *ParseContext) Flags() []*Clause { + return p.flags.flagOrder +} + +// LastCmd returns true if the element is the last (sub)command being evaluated. +func (p *ParseContext) LastCmd(element *ParseElement) bool { + lastCmdIndex := -1 + eIndex := -2 + for i, e := range p.Elements { + if element == e { + eIndex = i + } + + if e.OneOf.Cmd != nil { + lastCmdIndex = i + } + } + return lastCmdIndex == eIndex +} + +func (p *ParseContext) nextArg() *Clause { + if p.argumenti >= len(p.arguments.args) { + return nil + } + arg := p.arguments.args[p.argumenti] + if !arg.consumesRemainder() { + p.argumenti++ + } + return arg +} + +func (p *ParseContext) next() { + p.argi++ + p.args = p.args[1:] +} + +// HasTrailingArgs returns true if there are unparsed command-line arguments. +// This can occur if the parser can not match remaining arguments. +func (p *ParseContext) HasTrailingArgs() bool { + return len(p.args) > 0 +} + +func tokenize(args []string, ignoreDefault bool, resolvers []Resolver) *ParseContext { + return &ParseContext{ + ignoreDefault: ignoreDefault, + args: args, + rawArgs: args, + flags: newFlagGroup(), + arguments: newArgGroup(), + resolvers: resolvers, + } +} + +func (p *ParseContext) mergeFlags(flags *flagGroup) { + for _, flag := range flags.flagOrder { + if flag.shorthand != 0 { + p.flags.short[string(flag.shorthand)] = flag + } + p.flags.long[flag.name] = flag + p.flags.flagOrder = append(p.flags.flagOrder, flag) + } +} + +func (p *ParseContext) mergeArgs(args *argGroup) { + p.arguments.args = append(p.arguments.args, args.args...) +} + +func (p *ParseContext) EOL() bool { + return p.Peek().Type == TokenEOL +} + +// Next token in the parse context. 
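+// A long flag written as --name=value is split into a TokenLong followed by a
+// pushed TokenArg, and a bare "--" switches the parser into args-only mode so
+// that everything after it is returned as TokenArg.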
+func (p *ParseContext) Next() *Token { + if len(p.peek) > 0 { + return p.pop() + } + + // End of tokens. + if len(p.args) == 0 { + return &Token{Index: p.argi, Type: TokenEOL} + } + + arg := p.args[0] + p.next() + + if p.argsOnly { + return &Token{p.argi, TokenArg, arg} + } + + // All remaining args are passed directly. + if arg == "--" { + p.argsOnly = true + return p.Next() + } + + if strings.HasPrefix(arg, "--") { + parts := strings.SplitN(arg[2:], "=", 2) + token := &Token{p.argi, TokenLong, parts[0]} + if len(parts) == 2 { + p.Push(&Token{p.argi, TokenArg, parts[1]}) + } + return token + } + + if strings.HasPrefix(arg, "-") { + if len(arg) == 1 { + return &Token{Index: p.argi, Type: TokenShort} + } + rn, size := utf8.DecodeRuneInString(arg[1:]) + short := string(rn) + flag, ok := p.flags.short[short] + // Not a known short flag, we'll just return it anyway. + if !ok { + } else if isBoolFlag(flag.value) { + // Bool short flag. + } else { + // Short flag with combined argument: -fARG + token := &Token{p.argi, TokenShort, short} + if len(arg) > 2 { + p.Push(&Token{p.argi, TokenArg, arg[1+size:]}) + } + return token + } + + if len(arg) > 1+size { + p.args = append([]string{"-" + arg[1+size:]}, p.args...) + } + return &Token{p.argi, TokenShort, short} + } + + return &Token{p.argi, TokenArg, arg} +} + +func (p *ParseContext) Peek() *Token { + if len(p.peek) == 0 { + return p.Push(p.Next()) + } + return p.peek[len(p.peek)-1] +} + +func (p *ParseContext) Push(token *Token) *Token { + p.peek = append(p.peek, token) + return token +} + +func (p *ParseContext) pop() *Token { + end := len(p.peek) - 1 + token := p.peek[end] + p.peek = p.peek[0:end] + return token +} + +func (p *ParseContext) String() string { + if p.SelectedCommand == nil { + return "" + } + return p.SelectedCommand.FullCommand() +} + +func (p *ParseContext) matchedFlag(flag *Clause, value string) { + p.Elements = append(p.Elements, &ParseElement{OneOf: OneOfClause{Flag: flag}, Value: &value}) +} + +func (p *ParseContext) matchedArg(arg *Clause, value string) { + p.Elements = append(p.Elements, &ParseElement{OneOf: OneOfClause{Arg: arg}, Value: &value}) +} + +func (p *ParseContext) matchedCmd(cmd *CmdClause) { + p.Elements = append(p.Elements, &ParseElement{OneOf: OneOfClause{Cmd: cmd}}) + p.mergeFlags(cmd.flagGroup) + p.mergeArgs(cmd.argGroup) + p.SelectedCommand = cmd +} + +// Expand arguments from a file. Lines starting with # will be treated as comments. 
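+//
+// For example (the file name and application variable are illustrative only):
+//
+//   extra, err := ExpandArgsFromFile("flags.txt")
+//   if err == nil {
+//       MustParse(app.Parse(append(os.Args[1:], extra...)))
+//   }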
+func ExpandArgsFromFile(filename string) (out []string, err error) { + r, err := os.Open(filename) + if err != nil { + return nil, err + } + defer r.Close() + scanner := bufio.NewScanner(r) + for scanner.Scan() { + line := scanner.Text() + if strings.HasPrefix(line, "#") { + continue + } + out = append(out, line) + } + err = scanner.Err() + return +} + +func parse(context *ParseContext, app *Application) (err error) { // nolint: gocyclo + context.mergeFlags(app.flagGroup) + context.mergeArgs(app.argGroup) + + cmds := app.cmdGroup + ignoreDefault := context.ignoreDefault + +loop: + for !context.EOL() { + token := context.Peek() + + switch token.Type { + case TokenLong, TokenShort: + if flag, err := context.flags.parse(context); err != nil { + if !ignoreDefault { + if cmd := cmds.defaultSubcommand(); cmd != nil { + cmd.completionAlts = cmds.cmdNames() + context.matchedCmd(cmd) + cmds = cmd.cmdGroup + break + } + } + return err + } else if flag == app.helpFlag { + ignoreDefault = true + } + + case TokenArg: + if context.arguments.have() { + if app.noInterspersed { + // no more flags + context.argsOnly = true + } + arg := context.nextArg() + if arg != nil { + context.matchedArg(arg, token.String()) + context.Next() + continue + } + } + + if cmds.have() { + selectedDefault := false + cmd, ok := cmds.commands[token.String()] + if !ok { + if !ignoreDefault { + if cmd = cmds.defaultSubcommand(); cmd != nil { + cmd.completionAlts = cmds.cmdNames() + selectedDefault = true + } + } + if cmd == nil { + return TError("expected command but got {{.Arg0}}", V{"Arg0": token}) + } + } + if cmd == app.helpCommand { + ignoreDefault = true + } + cmd.completionAlts = nil + context.matchedCmd(cmd) + cmds = cmd.cmdGroup + if !selectedDefault { + context.Next() + } + continue + } + + break loop + + case TokenEOL: + break loop + } + } + + // Move to innermost default command. + for !ignoreDefault { + if cmd := cmds.defaultSubcommand(); cmd != nil { + cmd.completionAlts = cmds.cmdNames() + context.matchedCmd(cmd) + cmds = cmd.cmdGroup + } else { + break + } + } + + if !context.EOL() { + return TError("unexpected '{{.Arg0}}'", V{"Arg0": context.Peek()}) + } + + // Set defaults for all remaining args. + for arg := context.nextArg(); arg != nil && !arg.consumesRemainder(); arg = context.nextArg() { + for _, defaultValue := range arg.defaultValues { + if err := arg.value.Set(defaultValue); err != nil { + return TError("invalid default value '{{.Arg0}}' for argument '{{.Arg1}}'", V{"Arg0": defaultValue, "Arg1": arg.name}) + } + } + } + + return +} diff --git a/vendor/github.com/alecthomas/kingpin/resolver.go b/vendor/github.com/alecthomas/kingpin/resolver.go new file mode 100644 index 0000000000..b806494028 --- /dev/null +++ b/vendor/github.com/alecthomas/kingpin/resolver.go @@ -0,0 +1,157 @@ +package kingpin + +import ( + "encoding/json" + "fmt" + "os" + "regexp" + "strings" +) + +var ( + envarTransformRegexp = regexp.MustCompile(`[^a-zA-Z_]+`) +) + +// A Resolver retrieves flag values from an external source, such as a configuration file or environment variables. +type Resolver interface { + // Resolve key in the given parse context. + // + // A nil slice should be returned if the key can not be resolved. + Resolve(key string, context *ParseContext) ([]string, error) +} + +// ResolverFunc is a function that is also a Resolver. 
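+//
+// For example (key and value are illustrative only), a fixed lookup can be
+// used directly wherever a Resolver is accepted:
+//
+//   r := ResolverFunc(func(key string, _ *ParseContext) ([]string, error) {
+//       if key == "output" {
+//           return []string{"json"}, nil
+//       }
+//       return nil, nil
+//   })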
+type ResolverFunc func(key string, context *ParseContext) ([]string, error) + +func (r ResolverFunc) Resolve(key string, context *ParseContext) ([]string, error) { + return r(key, context) +} + +// A resolver that pulls values from the flag defaults. This resolver is always installed in the ParseContext. +func defaultsResolver() Resolver { + return ResolverFunc(func(key string, context *ParseContext) ([]string, error) { + for _, clause := range context.CombinedFlagsAndArgs() { + if clause.name == key { + return clause.defaultValues, nil + } + } + return nil, nil + }) +} + +func parseEnvar(envar, sep string) []string { + value, ok := os.LookupEnv(envar) + if !ok { + return nil + } + if sep == "" { + return []string{value} + } + return strings.Split(value, sep) +} + +// Resolves a clause value from the envar configured on that clause, if any. +func envarResolver(sep string) Resolver { + return ResolverFunc(func(key string, context *ParseContext) ([]string, error) { + for _, clause := range context.CombinedFlagsAndArgs() { + if key == clause.name { + if clause.noEnvar || clause.envar == "" { + return nil, nil + } + return parseEnvar(clause.envar, sep), nil + } + } + return nil, nil + }) +} + +// MapResolver resolves values from a static map. +func MapResolver(values map[string][]string) Resolver { + return ResolverFunc(func(key string, context *ParseContext) ([]string, error) { + return values[key], nil + }) +} + +// JSONResolver returns a Resolver that retrieves values from a JSON source. +func JSONResolver(data []byte) (Resolver, error) { + values := map[string]interface{}{} + err := json.Unmarshal(data, &values) + if err != nil { + return nil, err + } + mapping := map[string][]string{} + for key, value := range values { + sub, err := jsonDecodeValue(value) + if err != nil { + return nil, err + } + mapping[key] = sub + } + return MapResolver(mapping), nil +} + +func jsonDecodeValue(value interface{}) ([]string, error) { + switch v := value.(type) { + case []interface{}: + out := []string{} + for _, sv := range v { + next, err := jsonDecodeValue(sv) + if err != nil { + return nil, err + } + out = append(out, next...) + } + return out, nil + case string: + return []string{v}, nil + case float64: + return []string{fmt.Sprintf("%v", v)}, nil + case bool: + if v { + return []string{"true"}, nil + } + return []string{"false"}, nil + } + return nil, fmt.Errorf("unsupported JSON value %v (of type %T)", value, value) +} + +// RenamingResolver creates a resolver for remapping names for a child resolver. +// +// This is useful if your configuration file uses a naming convention that does not map directly to +// flag names. +func RenamingResolver(resolver Resolver, rename func(string) string) Resolver { + return ResolverFunc(func(key string, context *ParseContext) ([]string, error) { + return resolver.Resolve(rename(key), context) + }) +} + +// PrefixedEnvarResolver resolves any flag/argument via environment variables. +// +// "prefix" is the common-prefix for the environment variables. "separator", is the character used to separate +// multiple values within a single envar (eg. ";") +// +// With a prefix of APP_, flags in the form --some-flag will be transformed to APP_SOME_FLAG. 
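+//
+// For example (prefix and flag name are illustrative only; the ParseContext is
+// not consulted by this resolver):
+//
+//   r := PrefixedEnvarResolver("APP_", ";")
+//   values, err := r.Resolve("some-flag", nil) // reads APP_SOME_FLAG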
+func PrefixedEnvarResolver(prefix, separator string) Resolver { + return ResolverFunc(func(key string, context *ParseContext) ([]string, error) { + key = envarTransform(prefix + key) + return parseEnvar(key, separator), nil + }) +} + +// DontResolve returns a Resolver that will never return values for the given keys, even if provided. +func DontResolve(resolver Resolver, keys ...string) Resolver { + disabled := map[string]bool{} + for _, key := range keys { + disabled[key] = true + } + return ResolverFunc(func(key string, context *ParseContext) ([]string, error) { + if disabled[key] { + return nil, nil + } + return resolver.Resolve(key, context) + }) +} + +func envarTransform(name string) string { + return strings.ToUpper(envarTransformRegexp.ReplaceAllString(name, "_")) +} diff --git a/vendor/github.com/alecthomas/kingpin/struct.go b/vendor/github.com/alecthomas/kingpin/struct.go new file mode 100644 index 0000000000..e4773b8f91 --- /dev/null +++ b/vendor/github.com/alecthomas/kingpin/struct.go @@ -0,0 +1,196 @@ +package kingpin + +import ( + "fmt" + "reflect" + "strings" + "time" + "unicode/utf8" +) + +func (c *cmdMixin) fromStruct(clause *CmdClause, v interface{}) error { // nolint: gocyclo + urv := reflect.ValueOf(v) + rv := reflect.Indirect(reflect.ValueOf(v)) + if rv.Kind() != reflect.Struct { + return fmt.Errorf("expected a struct but received " + reflect.TypeOf(v).String()) + } + for i := 0; i < rv.NumField(); i++ { + // Parse out tags + field := rv.Field(i) + ft := rv.Type().Field(i) + if strings.ToLower(ft.Name[0:1]) == ft.Name[0:1] { + continue + } + tag := ft.Tag + help := tag.Get("help") + if help == "" { + help = tag.Get("description") + } + placeholder := tag.Get("placeholder") + if placeholder == "" { + placeholder = tag.Get("value-name") + } + dflt := tag.Get("default") + short := tag.Get("short") + required := tag.Get("required") + hidden := tag.Get("hidden") + env := tag.Get("env") + enum := tag.Get("enum") + name := strings.ToLower(strings.Join(camelCase(ft.Name), "-")) + if tag.Get("long") != "" { + name = tag.Get("long") + } + arg := tag.Get("arg") + + var action Action + onMethodName := "On" + strings.ToUpper(ft.Name[0:1]) + ft.Name[1:] + if actionMethod := urv.MethodByName(onMethodName); actionMethod.IsValid() { + action, _ = actionMethod.Interface().(func(*ParseElement, *ParseContext) error) + } + + if field.Kind() == reflect.Struct { + if ft.Anonymous { + if err := c.fromStruct(clause, field.Addr().Interface()); err != nil { + return err + } + } else { + cmd := c.addCommand(name, help) + cmd.parent = clause + if hidden != "" { + cmd = cmd.Hidden() + } + if err := cmd.Struct(field.Addr().Interface()); err != nil { + return err + } + } + continue + } + + // Define flag using extracted tags + var clause *Clause + if arg != "" { + clause = c.Arg(name, help) + } else { + clause = c.Flag(name, help) + } + if action != nil { + clause.Action(action) + } + if dflt != "" { + clause = clause.Default(dflt) + } + if short != "" { + r, _ := utf8.DecodeRuneInString(short) + if r == utf8.RuneError { + return fmt.Errorf("invalid short flag %s", short) + } + clause = clause.Short(r) + } + if required != "" { + clause = clause.Required() + } + if hidden != "" { + clause = clause.Hidden() + } + if placeholder != "" { + clause = clause.PlaceHolder(placeholder) + } + if env != "" { + clause = clause.Envar(env) + } + ptr := field.Addr().Interface() + if ft.Type == reflect.TypeOf(time.Duration(0)) { + clause.DurationVar(ptr.(*time.Duration)) + } else { + switch ft.Type.Kind() { 
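+			// The tags read above mean that a struct field such as
+			//
+			//   LogLevel string `help:"log level" default:"info" enum:"debug,info,warn,error"`
+			//
+			// becomes a --log-level flag with a default value and an enum
+			// restriction (field name and tag values here are illustrative only).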
+ case reflect.String: + if enum != "" { + clause.EnumVar(ptr.(*string), strings.Split(enum, ",")...) + } else { + clause.StringVar(ptr.(*string)) + } + + case reflect.Bool: + clause.BoolVar(ptr.(*bool)) + + case reflect.Float32: + clause.Float32Var(ptr.(*float32)) + case reflect.Float64: + clause.Float64Var(ptr.(*float64)) + + case reflect.Int: + clause.IntVar(ptr.(*int)) + case reflect.Int8: + clause.Int8Var(ptr.(*int8)) + case reflect.Int16: + clause.Int16Var(ptr.(*int16)) + case reflect.Int32: + clause.Int32Var(ptr.(*int32)) + case reflect.Int64: + clause.Int64Var(ptr.(*int64)) + + case reflect.Uint: + clause.UintVar(ptr.(*uint)) + case reflect.Uint8: + clause.Uint8Var(ptr.(*uint8)) + case reflect.Uint16: + clause.Uint16Var(ptr.(*uint16)) + case reflect.Uint32: + clause.Uint32Var(ptr.(*uint32)) + case reflect.Uint64: + clause.Uint64Var(ptr.(*uint64)) + + case reflect.Slice: + if ft.Type == reflect.TypeOf(time.Duration(0)) { + clause.DurationListVar(ptr.(*[]time.Duration)) + } else { + switch ft.Type.Elem().Kind() { + case reflect.String: + if enum != "" { + clause.EnumsVar(field.Addr().Interface().(*[]string), strings.Split(enum, ",")...) + } else { + clause.StringsVar(field.Addr().Interface().(*[]string)) + } + + case reflect.Bool: + clause.BoolListVar(field.Addr().Interface().(*[]bool)) + + case reflect.Float32: + clause.Float32ListVar(ptr.(*[]float32)) + case reflect.Float64: + clause.Float64ListVar(ptr.(*[]float64)) + + case reflect.Int: + clause.IntsVar(field.Addr().Interface().(*[]int)) + case reflect.Int8: + clause.Int8ListVar(ptr.(*[]int8)) + case reflect.Int16: + clause.Int16ListVar(ptr.(*[]int16)) + case reflect.Int32: + clause.Int32ListVar(ptr.(*[]int32)) + case reflect.Int64: + clause.Int64ListVar(ptr.(*[]int64)) + + case reflect.Uint: + clause.UintsVar(ptr.(*[]uint)) + case reflect.Uint8: + clause.HexBytesVar(ptr.(*[]byte)) + case reflect.Uint16: + clause.Uint16ListVar(ptr.(*[]uint16)) + case reflect.Uint32: + clause.Uint32ListVar(ptr.(*[]uint32)) + case reflect.Uint64: + clause.Uint64ListVar(ptr.(*[]uint64)) + + default: + return fmt.Errorf("unsupported field type %s for field %s", ft.Type.String(), ft.Name) + } + } + + default: + return fmt.Errorf("unsupported field type %s for field %s", ft.Type.String(), ft.Name) + } + } + } + return nil +} diff --git a/vendor/github.com/alecthomas/kingpin/templates.go b/vendor/github.com/alecthomas/kingpin/templates.go new file mode 100644 index 0000000000..ce5ef3a64a --- /dev/null +++ b/vendor/github.com/alecthomas/kingpin/templates.go @@ -0,0 +1,171 @@ +package kingpin + +// DefaultUsageTemplate is the default usage template. +var DefaultUsageTemplate = `{{define "FormatCommands" -}} +{{range .FlattenedCommands -}} +{{if not .Hidden}} + {{.CmdSummary}} +{{.Help|Wrap 4}} +{{if .Flags -}} +{{with .Flags|FlagsToTwoColumns}}{{FormatTwoColumnsWithIndent . 
4 2}}{{end}} +{{end -}} +{{end -}} +{{end -}} +{{end -}} + +{{define "FormatUsage" -}} +{{.AppSummary}} +{{if .Help}} +{{.Help|Wrap 0 -}} +{{end -}} + +{{end -}} + +{{if .Context.SelectedCommand -}} +{{T "usage:"}} {{.App.Name}} {{.App.FlagSummary}} {{.Context.SelectedCommand.CmdSummary}} +{{else}} +{{T "usage:"}} {{template "FormatUsage" .App}} +{{end}} +{{if .Context.Flags -}} +{{T "Flags:"}} +{{.Context.Flags|FlagsToTwoColumns|FormatTwoColumns}} +{{end -}} +{{if .Context.Args -}} +{{T "Args:"}} +{{.Context.Args|ArgsToTwoColumns|FormatTwoColumns}} +{{end -}} +{{if .Context.SelectedCommand -}} +{{if len .Context.SelectedCommand.Commands -}} +{{T "Subcommands:"}} +{{template "FormatCommands" .Context.SelectedCommand}} +{{end -}} +{{else if .App.Commands -}} +{{T "Commands:" -}} +{{template "FormatCommands" .App}} +{{end -}} +` + +// CompactUsageTemplate is a template with compactly formatted commands for large command structures. +var CompactUsageTemplate = `{{define "FormatCommand" -}} +{{if .FlagSummary}} {{.FlagSummary}}{{end -}} +{{range .Args}} {{if not .Required}}[{{end}}<{{.Name}}>{{if .Value|IsCumulative}} ...{{end}}{{if not .Required}}]{{end}}{{end -}} +{{end -}} + +{{define "FormatCommandList" -}} +{{range . -}} +{{if not .Hidden -}} +{{.Depth|Indent}}{{.Name}}{{if .Default}}*{{end}}{{template "FormatCommand" .}} +{{end -}} +{{template "FormatCommandList" .Commands -}} +{{end -}} +{{end -}} + +{{define "FormatUsage" -}} +{{template "FormatCommand" .}}{{if .Commands}} [ ...]{{end}} +{{if .Help}} +{{.Help|Wrap 0 -}} +{{end -}} + +{{end -}} + +{{if .Context.SelectedCommand -}} +{{T "usage:"}} {{.App.Name}} {{template "FormatUsage" .Context.SelectedCommand}} +{{else -}} +{{T "usage:"}} {{.App.Name}}{{template "FormatUsage" .App}} +{{end -}} +{{if .Context.Flags -}} +{{T "Flags:"}} +{{.Context.Flags|FlagsToTwoColumns|FormatTwoColumns}} +{{end -}} +{{if .Context.Args -}} +{{T "Args:"}} +{{.Context.Args|ArgsToTwoColumns|FormatTwoColumns}} +{{end -}} +{{if .Context.SelectedCommand -}} +{{if .Context.SelectedCommand.Commands -}} +{{T "Commands:"}} + {{.Context.SelectedCommand}} +{{.Context.SelectedCommand.Commands|CommandsToTwoColumns|FormatTwoColumns}} +{{end -}} +{{else if .App.Commands -}} +{{T "Commands:"}} +{{.App.Commands|CommandsToTwoColumns|FormatTwoColumns}} +{{end -}} +` + +var ManPageTemplate = `{{define "FormatFlags" -}} +{{range .Flags -}} +{{if not .Hidden -}} +.TP +\fB{{if .Short}}-{{.Short|Char}}, {{end}}--{{.Name}}{{if not .IsBoolFlag}}={{.FormatPlaceHolder}}{{end}}\fR +{{.Help}} +{{end -}} +{{end -}} +{{end -}} + +{{define "FormatCommand" -}} +{{end -}} + +{{define "FormatCommands" -}} +{{range .FlattenedCommands -}} +{{if not .Hidden -}} +.SS +\fB{{.CmdSummary}}\fR +.PP +{{.Help}} +{{template "FormatFlags" . 
-}} +{{end -}} +{{end -}} +{{end -}} + +{{define "FormatUsage" -}} +{{if .FlagSummary}} {{.FlagSummary}}{{end -}} +{{if .Commands}} [ ...]{{end}}\fR +{{end -}} + +.TH {{.App.Name}} 1 {{.App.Version}} "{{.App.Author}}" +.SH "NAME" +{{.App.Name}} +.SH "SYNOPSIS" +.TP +\fB{{.App.Name}}{{template "FormatUsage" .App}} +.SH "DESCRIPTION" +{{.App.Help}} +.SH "OPTIONS" +{{template "FormatFlags" .App -}} +{{if .App.Commands -}} +.SH "COMMANDS" +{{template "FormatCommands" .App -}} +{{end -}} +` + +var BashCompletionTemplate = ` +_{{.App.Name}}_bash_autocomplete() { + local cur prev opts base + COMPREPLY=() + cur="${COMP_WORDS[COMP_CWORD]}" + opts=$( ${COMP_WORDS[0]} --completion-bash ${COMP_WORDS[@]:1:$COMP_CWORD} ) + COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) ) + return 0 +} +complete -F _{{.App.Name}}_bash_autocomplete {{.App.Name}} + +` + +var ZshCompletionTemplate = ` +#compdef {{.App.Name}} +autoload -U compinit && compinit +autoload -U bashcompinit && bashcompinit + +_{{.App.Name}}_bash_autocomplete() { + local cur prev opts base + COMPREPLY=() + cur="${COMP_WORDS[COMP_CWORD]}" + opts=$( ${COMP_WORDS[0]} --completion-bash ${COMP_WORDS[@]:1:$COMP_CWORD} ) + COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) ) + [[ $COMPREPLY ]] && return + compgen -f + return 0 +} +complete -F _{{.App.Name}}_bash_autocomplete {{.App.Name}} +` diff --git a/vendor/github.com/alecthomas/kingpin/usage.go b/vendor/github.com/alecthomas/kingpin/usage.go new file mode 100644 index 0000000000..a8ada70f1a --- /dev/null +++ b/vendor/github.com/alecthomas/kingpin/usage.go @@ -0,0 +1,279 @@ +package kingpin + +import ( + "bytes" + "fmt" + "go/doc" + "io" + "strings" + "text/template" +) + +var ( + preIndent = " " +) + +// UsageContext contains all of the context used to render a usage message. +type UsageContext struct { + // The text/template body to use. + Template string + // Indentation multiplier (defaults to 2 of omitted). + Indent int + // Width of wrap. Defaults wraps to the terminal. + Width int + // Funcs available in the template. + Funcs template.FuncMap + // Vars available in the template. + Vars map[string]interface{} +} + +func formatTwoColumns(w io.Writer, indent, padding, width int, rows [][2]string) { + // Find size of first column. + s := 0 + for _, row := range rows { + if c := len(row[0]); c > s && c < 30 { + s = c + } + } + + indentStr := strings.Repeat(" ", indent) + offsetStr := strings.Repeat(" ", s+padding) + + for _, row := range rows { + buf := bytes.NewBuffer(nil) + doc.ToText(buf, row[1], "", preIndent, width-s-padding-indent) + lines := strings.Split(strings.TrimRight(buf.String(), "\n"), "\n") + fmt.Fprintf(w, "%s%-*s%*s", indentStr, s, row[0], padding, "") + if len(row[0]) >= 30 { + fmt.Fprintf(w, "\n%s%s", indentStr, offsetStr) + } + fmt.Fprintf(w, "%s\n", lines[0]) + for _, line := range lines[1:] { + fmt.Fprintf(w, "%s%s%s\n", indentStr, offsetStr, line) + } + } +} + +// Usage writes application usage to Writer. It parses args to determine +// appropriate help context, such as which command to show help for. 
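+//
+// The package-level Usage() helper, for example, simply calls
+// CommandLine.Usage(os.Args[1:]).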
+func (a *Application) Usage(args []string) { + context, err := a.parseContext(true, args) + a.FatalIfError(err, "") + if err := a.UsageForContextWithTemplate(a.defaultUsage, context); err != nil { + panic(err) + } +} + +func formatAppUsage(app *ApplicationModel) string { + s := []string{app.Name} + if len(app.Flags) > 0 { + s = append(s, app.FlagSummary()) + } + if len(app.Args) > 0 { + s = append(s, app.ArgSummary()) + } + return strings.Join(s, " ") +} + +func formatCmdUsage(app *ApplicationModel, cmd *CmdModel) string { + s := []string{app.Name, cmd.String()} + if len(app.Flags) > 0 { + s = append(s, app.FlagSummary()) + } + if len(app.Args) > 0 { + s = append(s, app.ArgSummary()) + } + return strings.Join(s, " ") +} + +// haveShort will be true if there are short flags present at all in the help. Useful for column alignment. +func formatFlag(haveShort bool, flag *ClauseModel) string { + flagString := "" + name := flag.Name + isBool := flag.IsBoolFlag() + if flag.IsNegatable() { + name = "[no-]" + name + } + if flag.Short != 0 { + flagString += fmt.Sprintf("-%c, --%s", flag.Short, name) + } else { + if haveShort { + flagString += fmt.Sprintf(" --%s", name) + } else { + flagString += fmt.Sprintf("--%s", name) + } + } + if !isBool { + flagString += fmt.Sprintf("=%s", flag.FormatPlaceHolder()) + } + if v, ok := flag.Value.(cumulativeValue); ok && v.IsCumulative() { + flagString += " ..." + } + return flagString +} + +type templateParseContext struct { + SelectedCommand *CmdModel + *FlagGroupModel + *ArgGroupModel +} + +// UsageForContext displays usage information from a ParseContext (obtained from +// Application.ParseContext() or Action(f) callbacks). +func (a *Application) UsageForContext(context *ParseContext) error { + return a.UsageForContextWithTemplate(a.defaultUsage, context) +} + +// UsageForContextWithTemplate is for fine-grained control over usage messages. You generally don't +// need to use this. 
+func (a *Application) UsageForContextWithTemplate(usageContext *UsageContext, parseContext *ParseContext) error { // nolint: gocyclo + indent := usageContext.Indent + if indent == 0 { + indent = 2 + } + width := usageContext.Width + if width == 0 { + width = guessWidth(a.output) + } + tmpl := usageContext.Template + if tmpl == "" { + tmpl = a.defaultUsage.Template + if tmpl == "" { + tmpl = DefaultUsageTemplate + } + } + funcs := template.FuncMap{ + "T": T, + "Indent": func(level int) string { + return strings.Repeat(" ", level*indent) + }, + "Wrap": func(indent int, s string) string { + buf := bytes.NewBuffer(nil) + indentText := strings.Repeat(" ", indent) + doc.ToText(buf, s, indentText, " "+indentText, width-indent) + return buf.String() + }, + "FormatFlag": formatFlag, + "FlagsToTwoColumns": func(f []*ClauseModel) [][2]string { + rows := [][2]string{} + haveShort := false + for _, flag := range f { + if flag.Short != 0 { + haveShort = true + break + } + } + for _, flag := range f { + if !flag.Hidden { + rows = append(rows, [2]string{formatFlag(haveShort, flag), flag.Help}) + } + } + return rows + }, + "RequiredFlags": func(f []*ClauseModel) []*ClauseModel { + requiredFlags := []*ClauseModel{} + for _, flag := range f { + if flag.Required { + requiredFlags = append(requiredFlags, flag) + } + } + return requiredFlags + }, + "OptionalFlags": func(f []*ClauseModel) []*ClauseModel { + optionalFlags := []*ClauseModel{} + for _, flag := range f { + if !flag.Required { + optionalFlags = append(optionalFlags, flag) + } + } + return optionalFlags + }, + "ArgsToTwoColumns": func(a []*ClauseModel) [][2]string { + rows := [][2]string{} + for _, arg := range a { + s := "<" + arg.Name + ">" + if !arg.Required { + s = "[" + s + "]" + } + rows = append(rows, [2]string{s, arg.Help}) + } + return rows + }, + "CommandsToTwoColumns": func(c []*CmdModel) [][2]string { + return commandsToColumns(indent, c) + }, + "FormatTwoColumns": func(rows [][2]string) string { + buf := bytes.NewBuffer(nil) + formatTwoColumns(buf, indent, indent, width, rows) + return buf.String() + }, + "FormatTwoColumnsWithIndent": func(rows [][2]string, indent, padding int) string { + buf := bytes.NewBuffer(nil) + formatTwoColumns(buf, indent, padding, width, rows) + return buf.String() + }, + "FormatAppUsage": formatAppUsage, + "FormatCommandUsage": formatCmdUsage, + "IsCumulative": func(value Value) bool { + r, ok := value.(cumulativeValue) + return ok && r.IsCumulative() + }, + "Char": func(c rune) string { + return string(c) + }, + } + for name, fn := range usageContext.Funcs { + funcs[name] = fn + } + t, err := template.New("usage").Funcs(funcs).Parse(tmpl) + if err != nil { + return err + } + appModel := a.Model() + var selectedCommand *CmdModel + if parseContext.SelectedCommand != nil { + selectedCommand = appModel.FindModelForCommand(parseContext.SelectedCommand) + } + ctx := map[string]interface{}{ + "App": appModel, + "Width": width, + "Context": &templateParseContext{ + SelectedCommand: selectedCommand, + FlagGroupModel: parseContext.flags.Model(), + ArgGroupModel: parseContext.arguments.Model(), + }, + } + for k, v := range usageContext.Vars { + ctx[k] = v + } + return t.Execute(a.output, ctx) +} + +func commandsToColumns(indent int, cmds []*CmdModel) [][2]string { + out := [][2]string{} + for _, cmd := range cmds { + if cmd.Hidden { + continue + } + left := cmd.Name + if cmd.FlagSummary() != "" { + left += " " + cmd.FlagSummary() + } + args := []string{} + for _, arg := range cmd.Args { + if arg.Required { + argText := 
"<" + arg.Name + ">" + if _, ok := arg.Value.(cumulativeValue); ok { + argText += " ..." + } + args = append(args, argText) + } + } + if len(args) != 0 { + left += " " + strings.Join(args, " ") + } + out = append(out, [2]string{strings.Repeat(" ", cmd.Depth*indent-1) + left, cmd.Help}) + out = append(out, commandsToColumns(indent, cmd.Commands)...) + } + return out +} diff --git a/vendor/github.com/alecthomas/kingpin/values.go b/vendor/github.com/alecthomas/kingpin/values.go new file mode 100644 index 0000000000..9b1847fc5a --- /dev/null +++ b/vendor/github.com/alecthomas/kingpin/values.go @@ -0,0 +1,460 @@ +package kingpin + +//go:generate go run ./cmd/genvalues/main.go + +import ( + "fmt" + "net" + "os" + "reflect" + "regexp" + "strings" + "time" + + "github.com/alecthomas/units" +) + +// NOTE: Most of the base type values were lifted from: +// http://golang.org/src/pkg/flag/flag.go?s=20146:20222 + +// Value is the interface to the dynamic value stored in a flag. +// (The default value is represented as a string.) +// +// If a Value has an IsBoolFlag() bool method returning true, the command-line +// parser makes --name equivalent to -name=true rather than using the next +// command-line argument, and adds a --no-name counterpart for negating the +// flag. +type Value interface { + String() string + Set(string) error +} + +// Getter is an interface that allows the contents of a Value to be retrieved. +// It wraps the Value interface, rather than being part of it, because it +// appeared after Go 1 and its compatibility rules. All Value types provided +// by this package satisfy the Getter interface. +type Getter interface { + Value + Get() interface{} +} + +// Optional interface to indicate boolean flags that don't accept a value, and +// implicitly have a --no- negation counterpart. +// +// This is for compatibility with the stdlib. +type boolFlag interface { + Value + IsBoolFlag() bool +} + +// BoolFlag is an optional interface to specify that a flag is a boolean flag. +type BoolFlag interface { + // Specify if the flag is negatable (ie. supports both --no- and --name). + BoolFlagIsNegatable() bool +} + +// Optional interface for values that cumulatively consume all remaining +// input. +type cumulativeValue interface { + Value + Reset() + IsCumulative() bool +} + +type accumulatorOptions struct { + separator string +} + +func (a *accumulatorOptions) split(value string) []string { + if a.separator == "" { + return []string{value} + } + return strings.Split(value, a.separator) +} + +func newAccumulatorOptions(options ...AccumulatorOption) *accumulatorOptions { + out := &accumulatorOptions{} + for _, option := range options { + option(out) + } + return out +} + +// AccumulatorOption are used to modify the behaviour of values that accumulate into slices, maps, etc. +// +// eg. Separator(',') +type AccumulatorOption func(a *accumulatorOptions) + +// Separator configures an accumulating value to split on this value. +func Separator(separator string) AccumulatorOption { + return func(a *accumulatorOptions) { + a.separator = separator + } +} + +type accumulator struct { + element func(value interface{}) Value + typ reflect.Type + slice reflect.Value + accumulatorOptions +} + +func isBoolFlag(f Value) bool { + if bf, ok := f.(boolFlag); ok { + return bf.IsBoolFlag() + } + _, ok := f.(BoolFlag) + return ok +} + +// Use reflection to accumulate values into a slice. 
+// +// target := []string{} +// newAccumulator(&target, func (value interface{}) Value { +// return newStringValue(value.(*string)) +// }) +func newAccumulator(slice interface{}, options []AccumulatorOption, element func(value interface{}) Value) *accumulator { + typ := reflect.TypeOf(slice) + if typ.Kind() != reflect.Ptr || typ.Elem().Kind() != reflect.Slice { + panic(T("expected a pointer to a slice")) + } + return &accumulator{ + element: element, + typ: typ.Elem().Elem(), + slice: reflect.ValueOf(slice), + accumulatorOptions: *newAccumulatorOptions(options...), + } +} + +func (a *accumulator) String() string { + out := []string{} + s := a.slice.Elem() + for i := 0; i < s.Len(); i++ { + out = append(out, a.element(s.Index(i).Addr().Interface()).String()) + } + return strings.Join(out, ",") +} + +func (a *accumulator) Set(value string) error { + values := []string{} + if a.separator == "" { + values = append(values, value) + } else { + values = append(values, strings.Split(value, a.separator)...) + } + for _, v := range values { + e := reflect.New(a.typ) + if err := a.element(e.Interface()).Set(v); err != nil { + return err + } + slice := reflect.Append(a.slice.Elem(), e.Elem()) + a.slice.Elem().Set(slice) + } + return nil +} + +func (a *accumulator) Get() interface{} { + return a.slice.Interface() +} + +func (a *accumulator) IsCumulative() bool { + return true +} + +func (a *accumulator) Reset() { + if a.slice.Kind() == reflect.Ptr { + a.slice.Elem().Set(reflect.MakeSlice(a.slice.Type().Elem(), 0, 0)) + } else { + a.slice.Set(reflect.MakeSlice(a.slice.Type(), 0, 0)) + } +} + +func (b *boolValue) BoolFlagIsNegatable() bool { return false } + +// -- A boolean flag that can not be negated. +func (n *negatableBoolValue) BoolFlagIsNegatable() bool { return true } + +// -- map[string]string Value +type stringMapValue struct { + values *map[string]string + accumulatorOptions +} + +func newStringMapValue(p *map[string]string, options ...AccumulatorOption) *stringMapValue { + return &stringMapValue{ + values: p, + accumulatorOptions: *newAccumulatorOptions(options...), + } +} + +var stringMapRegex = regexp.MustCompile("[:=]") + +func (s *stringMapValue) Set(value string) error { + values := []string{} + if s.separator == "" { + values = append(values, value) + } else { + values = append(values, strings.Split(value, s.separator)...) 
+ } + for _, v := range values { + parts := stringMapRegex.Split(v, 2) + if len(parts) != 2 { + return TError("expected KEY=VALUE got '{{.Arg0}}'", V{"Arg0": v}) + } + (*s.values)[parts[0]] = parts[1] + } + return nil +} + +func (s *stringMapValue) Get() interface{} { + return *s.values +} + +func (s *stringMapValue) String() string { + return fmt.Sprintf("%s", *s.values) +} + +func (s *stringMapValue) IsCumulative() bool { + return true +} + +func (s *stringMapValue) Reset() { + *s.values = map[string]string{} +} + +// -- existingFile Value + +type fileStatValue struct { + path *string + predicate func(os.FileInfo) error +} + +func newFileStatValue(p *string, predicate func(os.FileInfo) error) *fileStatValue { + return &fileStatValue{ + path: p, + predicate: predicate, + } +} + +func (f *fileStatValue) Set(value string) error { + if s, err := os.Stat(value); os.IsNotExist(err) { + return TError("path '{{.Arg0}}' does not exist", V{"Arg0": value}) + } else if err != nil { + return err + } else if err := f.predicate(s); err != nil { + return err + } + *f.path = value + return nil +} + +func (f *fileStatValue) Get() interface{} { + return (string)(*f.path) +} + +func (f *fileStatValue) String() string { + return *f.path +} + +// -- net.IP Value +type ipValue net.IP + +func newIPValue(p *net.IP) *ipValue { + return (*ipValue)(p) +} + +func (i *ipValue) Set(value string) error { + ip := net.ParseIP(value) + if ip == nil { + return fmt.Errorf("'%s' is not an IP address", value) + } + *i = *(*ipValue)(&ip) + return nil +} + +func (i *ipValue) Get() interface{} { + return (net.IP)(*i) +} + +func (i *ipValue) String() string { + return (*net.IP)(i).String() +} + +// A flag whose value must be in a set of options. +type enumValue struct { + value *string + options []string +} + +func newEnumFlag(target *string, options ...string) *enumValue { + return &enumValue{ + value: target, + options: options, + } +} + +func (e *enumValue) String() string { + return *e.value +} + +func (e *enumValue) Set(value string) error { + for _, v := range e.options { + if v == value { + *e.value = value + return nil + } + } + return TError("enum value must be one of {{.Arg0}}, got '{{.Arg1}}'", V{"Arg0": strings.Join(e.options, T(",")), "Arg1": value}) +} + +func (e *enumValue) Get() interface{} { + return (string)(*e.value) +} + +// -- []string Enum Value +type enumsValue struct { + value *[]string + options []string + accumulatorOptions +} + +func newEnumsFlag(target *[]string, options ...string) *enumsValue { + return &enumsValue{ + value: target, + options: options, + } +} + +func (e *enumsValue) Set(value string) error { +nextValue: + for _, v := range e.split(value) { + for _, o := range e.options { + if o == v { + *e.value = append(*e.value, v) + continue nextValue + } + } + return TError("enum value must be one of {{.Arg0}}, got '{{.Arg1}}'", V{"Arg0": strings.Join(e.options, T(",")), "Arg1": v}) + } + return nil +} + +func (e *enumsValue) Get() interface{} { + return ([]string)(*e.value) +} + +func (e *enumsValue) String() string { + return strings.Join(*e.value, ",") +} + +func (e *enumsValue) IsCumulative() bool { + return true +} + +func (e *enumsValue) Reset() { + *e.value = []string{} +} + +// -- units.Base2Bytes Value +type bytesValue units.Base2Bytes + +func newBytesValue(p *units.Base2Bytes) *bytesValue { + return (*bytesValue)(p) +} + +func (d *bytesValue) Set(s string) error { + v, err := units.ParseBase2Bytes(s) + *d = bytesValue(v) + return err +} + +func (d *bytesValue) Get() interface{} { return 
units.Base2Bytes(*d) } + +func (d *bytesValue) String() string { return (*units.Base2Bytes)(d).String() } + +func newExistingFileValue(target *string) *fileStatValue { + return newFileStatValue(target, func(s os.FileInfo) error { + if s.IsDir() { + return TError("'{{.Arg0}}' is a directory", V{"Arg0": s.Name()}) + } + return nil + }) +} + +func newExistingDirValue(target *string) *fileStatValue { + return newFileStatValue(target, func(s os.FileInfo) error { + if !s.IsDir() { + return TError("'{{.Arg0}}' is a file", V{"Arg0": s.Name()}) + } + return nil + }) +} + +func newExistingFileOrDirValue(target *string) *fileStatValue { + return newFileStatValue(target, func(s os.FileInfo) error { return nil }) +} + +type counterValue int + +func newCounterValue(n *int) *counterValue { + return (*counterValue)(n) +} + +func (c *counterValue) Set(s string) error { + *c++ + return nil +} + +func (c *counterValue) Get() interface{} { return (int)(*c) } +func (c *counterValue) IsBoolFlag() bool { return true } +func (c *counterValue) String() string { return fmt.Sprintf("%d", *c) } +func (c *counterValue) IsCumulative() bool { return true } +func (c *counterValue) Reset() { *c = 0 } + +// -- time.Time Value +type timeValue struct { + format string + v *time.Time +} + +func newTimeValue(format string, p *time.Time) *timeValue { + return &timeValue{format, p} +} + +func (f *timeValue) Set(s string) error { + v, err := time.Parse(f.format, s) + if err == nil { + *f.v = (time.Time)(v) + } + return err +} + +func (f *timeValue) Get() interface{} { return (time.Time)(*f.v) } + +func (f *timeValue) String() string { return f.v.String() } + +// Time parses a time.Time. +// +// Format is the layout as specified at https://golang.org/pkg/time/#Parse +func (p *Clause) Time(format string) (target *time.Time) { + target = new(time.Time) + p.TimeVar(format, target) + return +} + +func (p *Clause) TimeVar(format string, target *time.Time) { + p.SetValue(newTimeValue(format, target)) +} + +// TimeList accumulates time.Time values into a slice. +func (p *Clause) TimeList(format string, options ...AccumulatorOption) (target *[]time.Time) { + target = new([]time.Time) + p.TimeListVar(format, target, options...) 
+ return +} + +func (p *Clause) TimeListVar(format string, target *[]time.Time, options ...AccumulatorOption) { + p.SetValue(newAccumulator(target, options, func(v interface{}) Value { + return newTimeValue(format, v.(*time.Time)) + })) +} diff --git a/vendor/github.com/alecthomas/kingpin/values.json b/vendor/github.com/alecthomas/kingpin/values.json new file mode 100644 index 0000000000..faf55b857f --- /dev/null +++ b/vendor/github.com/alecthomas/kingpin/values.json @@ -0,0 +1,25 @@ +[ + {"name": "Duration", "type": "time.Duration", "parser": "time.ParseDuration(s)", "help": "Time duration."}, + {"name": "ExistingDir", "type": "string", "plural": "ExistingDirs", "no_value_parser": true}, + {"name": "ExistingFile", "type": "string", "plural": "ExistingFiles", "no_value_parser": true}, + {"name": "ExistingFileOrDir", "type": "string", "plural": "ExistingFilesOrDirs", "no_value_parser": true}, + {"name": "HexBytes", "type": "[]byte", "parser": "hex.DecodeString(s)", "help": "Bytes as a hex string."}, + {"name": "IP", "type": "net.IP", "no_value_parser": true}, + {"name": "NegatableBool", "type": "bool", "parser": "strconv.ParseBool(s)"}, + {"name": "Regexp", "type": "*regexp.Regexp", "parser": "regexp.Compile(s)"}, + {"name": "URL", "type": "*url.URL", "parser": "url.Parse(s)"}, + {"type": "bool", "parser": "strconv.ParseBool(s)"}, + {"type": "float32", "parser": "strconv.ParseFloat(s, 32)"}, + {"type": "float64", "parser": "strconv.ParseFloat(s, 64)"}, + {"type": "int", "parser": "strconv.ParseFloat(s, 64)", "plural": "Ints"}, + {"type": "int16", "parser": "strconv.ParseInt(s, 0, 16)"}, + {"type": "int32", "parser": "strconv.ParseInt(s, 0, 32)"}, + {"type": "int64", "parser": "strconv.ParseInt(s, 0, 64)"}, + {"type": "int8", "parser": "strconv.ParseInt(s, 0, 8)"}, + {"type": "string", "parser": "s, error(nil)", "format": "string(*f.v)", "plural": "Strings"}, + {"type": "uint", "parser": "strconv.ParseUint(s, 0, 64)", "plural": "Uints"}, + {"type": "uint16", "parser": "strconv.ParseUint(s, 0, 16)"}, + {"type": "uint32", "parser": "strconv.ParseUint(s, 0, 32)"}, + {"type": "uint64", "parser": "strconv.ParseUint(s, 0, 64)"}, + {"type": "uint8", "parser": "strconv.ParseUint(s, 0, 8)"} +] diff --git a/vendor/github.com/alecthomas/kingpin/values_generated.go b/vendor/github.com/alecthomas/kingpin/values_generated.go new file mode 100644 index 0000000000..6c796d03f2 --- /dev/null +++ b/vendor/github.com/alecthomas/kingpin/values_generated.go @@ -0,0 +1,882 @@ +package kingpin + +import ( + "encoding/hex" + "fmt" + "net" + "net/url" + "regexp" + "strconv" + "time" +) + +// This file is autogenerated by "go generate .". Do not modify. + +// -- time.Duration Value +type durationValue struct{ v *time.Duration } + +func newDurationValue(p *time.Duration) *durationValue { + return &durationValue{p} +} + +func (f *durationValue) Set(s string) error { + v, err := time.ParseDuration(s) + if err == nil { + *f.v = (time.Duration)(v) + } + return err +} + +func (f *durationValue) Get() interface{} { return (time.Duration)(*f.v) } + +func (f *durationValue) String() string { return fmt.Sprintf("%v", *f.v) } + +// Time duration. +func (p *Clause) Duration() (target *time.Duration) { + target = new(time.Duration) + p.DurationVar(target) + return +} + +func (p *Clause) DurationVar(target *time.Duration) { + p.SetValue(newDurationValue(target)) +} + +// DurationList accumulates time.Duration values into a slice. 
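+//
+// For example (application variable and flag name are illustrative only):
+//
+//   timeouts := app.Flag("timeout", "Retry timeouts.").DurationList()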
+func (p *Clause) DurationList(options ...AccumulatorOption) (target *[]time.Duration) { + target = new([]time.Duration) + p.DurationListVar(target, options...) + return +} + +func (p *Clause) DurationListVar(target *[]time.Duration, options ...AccumulatorOption) { + p.SetValue(newAccumulator(target, options, func(v interface{}) Value { + return newDurationValue(v.(*time.Duration)) + })) +} + +// ExistingDirs accumulates string values into a slice. +func (p *Clause) ExistingDirs(options ...AccumulatorOption) (target *[]string) { + target = new([]string) + p.ExistingDirsVar(target, options...) + return +} + +func (p *Clause) ExistingDirsVar(target *[]string, options ...AccumulatorOption) { + p.SetValue(newAccumulator(target, options, func(v interface{}) Value { + return newExistingDirValue(v.(*string)) + })) +} + +// ExistingFiles accumulates string values into a slice. +func (p *Clause) ExistingFiles(options ...AccumulatorOption) (target *[]string) { + target = new([]string) + p.ExistingFilesVar(target, options...) + return +} + +func (p *Clause) ExistingFilesVar(target *[]string, options ...AccumulatorOption) { + p.SetValue(newAccumulator(target, options, func(v interface{}) Value { + return newExistingFileValue(v.(*string)) + })) +} + +// ExistingFilesOrDirs accumulates string values into a slice. +func (p *Clause) ExistingFilesOrDirs(options ...AccumulatorOption) (target *[]string) { + target = new([]string) + p.ExistingFilesOrDirsVar(target, options...) + return +} + +func (p *Clause) ExistingFilesOrDirsVar(target *[]string, options ...AccumulatorOption) { + p.SetValue(newAccumulator(target, options, func(v interface{}) Value { + return newExistingFileOrDirValue(v.(*string)) + })) +} + +// -- []byte Value +type hexBytesValue struct{ v *[]byte } + +func newHexBytesValue(p *[]byte) *hexBytesValue { + return &hexBytesValue{p} +} + +func (f *hexBytesValue) Set(s string) error { + v, err := hex.DecodeString(s) + if err == nil { + *f.v = ([]byte)(v) + } + return err +} + +func (f *hexBytesValue) Get() interface{} { return ([]byte)(*f.v) } + +func (f *hexBytesValue) String() string { return fmt.Sprintf("%v", *f.v) } + +// Bytes as a hex string. +func (p *Clause) HexBytes() (target *[]byte) { + target = new([]byte) + p.HexBytesVar(target) + return +} + +func (p *Clause) HexBytesVar(target *[]byte) { + p.SetValue(newHexBytesValue(target)) +} + +// HexBytesList accumulates []byte values into a slice. +func (p *Clause) HexBytesList(options ...AccumulatorOption) (target *[][]byte) { + target = new([][]byte) + p.HexBytesListVar(target, options...) + return +} + +func (p *Clause) HexBytesListVar(target *[][]byte, options ...AccumulatorOption) { + p.SetValue(newAccumulator(target, options, func(v interface{}) Value { + return newHexBytesValue(v.(*[]byte)) + })) +} + +// IPList accumulates net.IP values into a slice. +func (p *Clause) IPList(options ...AccumulatorOption) (target *[]net.IP) { + target = new([]net.IP) + p.IPListVar(target, options...) 
+ return +} + +func (p *Clause) IPListVar(target *[]net.IP, options ...AccumulatorOption) { + p.SetValue(newAccumulator(target, options, func(v interface{}) Value { + return newIPValue(v.(*net.IP)) + })) +} + +// -- bool Value +type negatableBoolValue struct{ v *bool } + +func newNegatableBoolValue(p *bool) *negatableBoolValue { + return &negatableBoolValue{p} +} + +func (f *negatableBoolValue) Set(s string) error { + v, err := strconv.ParseBool(s) + if err == nil { + *f.v = (bool)(v) + } + return err +} + +func (f *negatableBoolValue) Get() interface{} { return (bool)(*f.v) } + +func (f *negatableBoolValue) String() string { return fmt.Sprintf("%v", *f.v) } + +// NegatableBool parses the next command-line value as bool. +func (p *Clause) NegatableBool() (target *bool) { + target = new(bool) + p.NegatableBoolVar(target) + return +} + +func (p *Clause) NegatableBoolVar(target *bool) { + p.SetValue(newNegatableBoolValue(target)) +} + +// NegatableBoolList accumulates bool values into a slice. +func (p *Clause) NegatableBoolList(options ...AccumulatorOption) (target *[]bool) { + target = new([]bool) + p.NegatableBoolListVar(target, options...) + return +} + +func (p *Clause) NegatableBoolListVar(target *[]bool, options ...AccumulatorOption) { + p.SetValue(newAccumulator(target, options, func(v interface{}) Value { + return newNegatableBoolValue(v.(*bool)) + })) +} + +// -- *regexp.Regexp Value +type regexpValue struct{ v **regexp.Regexp } + +func newRegexpValue(p **regexp.Regexp) *regexpValue { + return ®expValue{p} +} + +func (f *regexpValue) Set(s string) error { + v, err := regexp.Compile(s) + if err == nil { + *f.v = (*regexp.Regexp)(v) + } + return err +} + +func (f *regexpValue) Get() interface{} { return (*regexp.Regexp)(*f.v) } + +func (f *regexpValue) String() string { return fmt.Sprintf("%v", *f.v) } + +// Regexp parses the next command-line value as *regexp.Regexp. +func (p *Clause) Regexp() (target **regexp.Regexp) { + target = new(*regexp.Regexp) + p.RegexpVar(target) + return +} + +func (p *Clause) RegexpVar(target **regexp.Regexp) { + p.SetValue(newRegexpValue(target)) +} + +// RegexpList accumulates *regexp.Regexp values into a slice. +func (p *Clause) RegexpList(options ...AccumulatorOption) (target *[]*regexp.Regexp) { + target = new([]*regexp.Regexp) + p.RegexpListVar(target, options...) + return +} + +func (p *Clause) RegexpListVar(target *[]*regexp.Regexp, options ...AccumulatorOption) { + p.SetValue(newAccumulator(target, options, func(v interface{}) Value { + return newRegexpValue(v.(**regexp.Regexp)) + })) +} + +// -- *url.URL Value +type uRLValue struct{ v **url.URL } + +func newURLValue(p **url.URL) *uRLValue { + return &uRLValue{p} +} + +func (f *uRLValue) Set(s string) error { + v, err := url.Parse(s) + if err == nil { + *f.v = (*url.URL)(v) + } + return err +} + +func (f *uRLValue) Get() interface{} { return (*url.URL)(*f.v) } + +func (f *uRLValue) String() string { return fmt.Sprintf("%v", *f.v) } + +// URL parses the next command-line value as *url.URL. +func (p *Clause) URL() (target **url.URL) { + target = new(*url.URL) + p.URLVar(target) + return +} + +func (p *Clause) URLVar(target **url.URL) { + p.SetValue(newURLValue(target)) +} + +// URLList accumulates *url.URL values into a slice. +func (p *Clause) URLList(options ...AccumulatorOption) (target *[]*url.URL) { + target = new([]*url.URL) + p.URLListVar(target, options...) 
+ return +} + +func (p *Clause) URLListVar(target *[]*url.URL, options ...AccumulatorOption) { + p.SetValue(newAccumulator(target, options, func(v interface{}) Value { + return newURLValue(v.(**url.URL)) + })) +} + +// -- bool Value +type boolValue struct{ v *bool } + +func newBoolValue(p *bool) *boolValue { + return &boolValue{p} +} + +func (f *boolValue) Set(s string) error { + v, err := strconv.ParseBool(s) + if err == nil { + *f.v = (bool)(v) + } + return err +} + +func (f *boolValue) Get() interface{} { return (bool)(*f.v) } + +func (f *boolValue) String() string { return fmt.Sprintf("%v", *f.v) } + +// Bool parses the next command-line value as bool. +func (p *Clause) Bool() (target *bool) { + target = new(bool) + p.BoolVar(target) + return +} + +func (p *Clause) BoolVar(target *bool) { + p.SetValue(newBoolValue(target)) +} + +// BoolList accumulates bool values into a slice. +func (p *Clause) BoolList(options ...AccumulatorOption) (target *[]bool) { + target = new([]bool) + p.BoolListVar(target, options...) + return +} + +func (p *Clause) BoolListVar(target *[]bool, options ...AccumulatorOption) { + p.SetValue(newAccumulator(target, options, func(v interface{}) Value { + return newBoolValue(v.(*bool)) + })) +} + +// -- float32 Value +type float32Value struct{ v *float32 } + +func newFloat32Value(p *float32) *float32Value { + return &float32Value{p} +} + +func (f *float32Value) Set(s string) error { + v, err := strconv.ParseFloat(s, 32) + if err == nil { + *f.v = (float32)(v) + } + return err +} + +func (f *float32Value) Get() interface{} { return (float32)(*f.v) } + +func (f *float32Value) String() string { return fmt.Sprintf("%v", *f.v) } + +// Float32 parses the next command-line value as float32. +func (p *Clause) Float32() (target *float32) { + target = new(float32) + p.Float32Var(target) + return +} + +func (p *Clause) Float32Var(target *float32) { + p.SetValue(newFloat32Value(target)) +} + +// Float32List accumulates float32 values into a slice. +func (p *Clause) Float32List(options ...AccumulatorOption) (target *[]float32) { + target = new([]float32) + p.Float32ListVar(target, options...) + return +} + +func (p *Clause) Float32ListVar(target *[]float32, options ...AccumulatorOption) { + p.SetValue(newAccumulator(target, options, func(v interface{}) Value { + return newFloat32Value(v.(*float32)) + })) +} + +// -- float64 Value +type float64Value struct{ v *float64 } + +func newFloat64Value(p *float64) *float64Value { + return &float64Value{p} +} + +func (f *float64Value) Set(s string) error { + v, err := strconv.ParseFloat(s, 64) + if err == nil { + *f.v = (float64)(v) + } + return err +} + +func (f *float64Value) Get() interface{} { return (float64)(*f.v) } + +func (f *float64Value) String() string { return fmt.Sprintf("%v", *f.v) } + +// Float64 parses the next command-line value as float64. +func (p *Clause) Float64() (target *float64) { + target = new(float64) + p.Float64Var(target) + return +} + +func (p *Clause) Float64Var(target *float64) { + p.SetValue(newFloat64Value(target)) +} + +// Float64List accumulates float64 values into a slice. +func (p *Clause) Float64List(options ...AccumulatorOption) (target *[]float64) { + target = new([]float64) + p.Float64ListVar(target, options...) 
+ return +} + +func (p *Clause) Float64ListVar(target *[]float64, options ...AccumulatorOption) { + p.SetValue(newAccumulator(target, options, func(v interface{}) Value { + return newFloat64Value(v.(*float64)) + })) +} + +// -- int Value +type intValue struct{ v *int } + +func newIntValue(p *int) *intValue { + return &intValue{p} +} + +func (f *intValue) Set(s string) error { + v, err := strconv.ParseFloat(s, 64) + if err == nil { + *f.v = (int)(v) + } + return err +} + +func (f *intValue) Get() interface{} { return (int)(*f.v) } + +func (f *intValue) String() string { return fmt.Sprintf("%v", *f.v) } + +// Int parses the next command-line value as int. +func (p *Clause) Int() (target *int) { + target = new(int) + p.IntVar(target) + return +} + +func (p *Clause) IntVar(target *int) { + p.SetValue(newIntValue(target)) +} + +// Ints accumulates int values into a slice. +func (p *Clause) Ints(options ...AccumulatorOption) (target *[]int) { + target = new([]int) + p.IntsVar(target, options...) + return +} + +func (p *Clause) IntsVar(target *[]int, options ...AccumulatorOption) { + p.SetValue(newAccumulator(target, options, func(v interface{}) Value { + return newIntValue(v.(*int)) + })) +} + +// -- int16 Value +type int16Value struct{ v *int16 } + +func newInt16Value(p *int16) *int16Value { + return &int16Value{p} +} + +func (f *int16Value) Set(s string) error { + v, err := strconv.ParseInt(s, 0, 16) + if err == nil { + *f.v = (int16)(v) + } + return err +} + +func (f *int16Value) Get() interface{} { return (int16)(*f.v) } + +func (f *int16Value) String() string { return fmt.Sprintf("%v", *f.v) } + +// Int16 parses the next command-line value as int16. +func (p *Clause) Int16() (target *int16) { + target = new(int16) + p.Int16Var(target) + return +} + +func (p *Clause) Int16Var(target *int16) { + p.SetValue(newInt16Value(target)) +} + +// Int16List accumulates int16 values into a slice. +func (p *Clause) Int16List(options ...AccumulatorOption) (target *[]int16) { + target = new([]int16) + p.Int16ListVar(target, options...) + return +} + +func (p *Clause) Int16ListVar(target *[]int16, options ...AccumulatorOption) { + p.SetValue(newAccumulator(target, options, func(v interface{}) Value { + return newInt16Value(v.(*int16)) + })) +} + +// -- int32 Value +type int32Value struct{ v *int32 } + +func newInt32Value(p *int32) *int32Value { + return &int32Value{p} +} + +func (f *int32Value) Set(s string) error { + v, err := strconv.ParseInt(s, 0, 32) + if err == nil { + *f.v = (int32)(v) + } + return err +} + +func (f *int32Value) Get() interface{} { return (int32)(*f.v) } + +func (f *int32Value) String() string { return fmt.Sprintf("%v", *f.v) } + +// Int32 parses the next command-line value as int32. +func (p *Clause) Int32() (target *int32) { + target = new(int32) + p.Int32Var(target) + return +} + +func (p *Clause) Int32Var(target *int32) { + p.SetValue(newInt32Value(target)) +} + +// Int32List accumulates int32 values into a slice. +func (p *Clause) Int32List(options ...AccumulatorOption) (target *[]int32) { + target = new([]int32) + p.Int32ListVar(target, options...) 
+ return +} + +func (p *Clause) Int32ListVar(target *[]int32, options ...AccumulatorOption) { + p.SetValue(newAccumulator(target, options, func(v interface{}) Value { + return newInt32Value(v.(*int32)) + })) +} + +// -- int64 Value +type int64Value struct{ v *int64 } + +func newInt64Value(p *int64) *int64Value { + return &int64Value{p} +} + +func (f *int64Value) Set(s string) error { + v, err := strconv.ParseInt(s, 0, 64) + if err == nil { + *f.v = (int64)(v) + } + return err +} + +func (f *int64Value) Get() interface{} { return (int64)(*f.v) } + +func (f *int64Value) String() string { return fmt.Sprintf("%v", *f.v) } + +// Int64 parses the next command-line value as int64. +func (p *Clause) Int64() (target *int64) { + target = new(int64) + p.Int64Var(target) + return +} + +func (p *Clause) Int64Var(target *int64) { + p.SetValue(newInt64Value(target)) +} + +// Int64List accumulates int64 values into a slice. +func (p *Clause) Int64List(options ...AccumulatorOption) (target *[]int64) { + target = new([]int64) + p.Int64ListVar(target, options...) + return +} + +func (p *Clause) Int64ListVar(target *[]int64, options ...AccumulatorOption) { + p.SetValue(newAccumulator(target, options, func(v interface{}) Value { + return newInt64Value(v.(*int64)) + })) +} + +// -- int8 Value +type int8Value struct{ v *int8 } + +func newInt8Value(p *int8) *int8Value { + return &int8Value{p} +} + +func (f *int8Value) Set(s string) error { + v, err := strconv.ParseInt(s, 0, 8) + if err == nil { + *f.v = (int8)(v) + } + return err +} + +func (f *int8Value) Get() interface{} { return (int8)(*f.v) } + +func (f *int8Value) String() string { return fmt.Sprintf("%v", *f.v) } + +// Int8 parses the next command-line value as int8. +func (p *Clause) Int8() (target *int8) { + target = new(int8) + p.Int8Var(target) + return +} + +func (p *Clause) Int8Var(target *int8) { + p.SetValue(newInt8Value(target)) +} + +// Int8List accumulates int8 values into a slice. +func (p *Clause) Int8List(options ...AccumulatorOption) (target *[]int8) { + target = new([]int8) + p.Int8ListVar(target, options...) + return +} + +func (p *Clause) Int8ListVar(target *[]int8, options ...AccumulatorOption) { + p.SetValue(newAccumulator(target, options, func(v interface{}) Value { + return newInt8Value(v.(*int8)) + })) +} + +// -- string Value +type stringValue struct{ v *string } + +func newStringValue(p *string) *stringValue { + return &stringValue{p} +} + +func (f *stringValue) Set(s string) error { + v, err := s, error(nil) + if err == nil { + *f.v = (string)(v) + } + return err +} + +func (f *stringValue) Get() interface{} { return (string)(*f.v) } + +func (f *stringValue) String() string { return string(*f.v) } + +// String parses the next command-line value as string. +func (p *Clause) String() (target *string) { + target = new(string) + p.StringVar(target) + return +} + +func (p *Clause) StringVar(target *string) { + p.SetValue(newStringValue(target)) +} + +// Strings accumulates string values into a slice. +func (p *Clause) Strings(options ...AccumulatorOption) (target *[]string) { + target = new([]string) + p.StringsVar(target, options...) 
+ return +} + +func (p *Clause) StringsVar(target *[]string, options ...AccumulatorOption) { + p.SetValue(newAccumulator(target, options, func(v interface{}) Value { + return newStringValue(v.(*string)) + })) +} + +// -- uint Value +type uintValue struct{ v *uint } + +func newUintValue(p *uint) *uintValue { + return &uintValue{p} +} + +func (f *uintValue) Set(s string) error { + v, err := strconv.ParseUint(s, 0, 64) + if err == nil { + *f.v = (uint)(v) + } + return err +} + +func (f *uintValue) Get() interface{} { return (uint)(*f.v) } + +func (f *uintValue) String() string { return fmt.Sprintf("%v", *f.v) } + +// Uint parses the next command-line value as uint. +func (p *Clause) Uint() (target *uint) { + target = new(uint) + p.UintVar(target) + return +} + +func (p *Clause) UintVar(target *uint) { + p.SetValue(newUintValue(target)) +} + +// Uints accumulates uint values into a slice. +func (p *Clause) Uints(options ...AccumulatorOption) (target *[]uint) { + target = new([]uint) + p.UintsVar(target, options...) + return +} + +func (p *Clause) UintsVar(target *[]uint, options ...AccumulatorOption) { + p.SetValue(newAccumulator(target, options, func(v interface{}) Value { + return newUintValue(v.(*uint)) + })) +} + +// -- uint16 Value +type uint16Value struct{ v *uint16 } + +func newUint16Value(p *uint16) *uint16Value { + return &uint16Value{p} +} + +func (f *uint16Value) Set(s string) error { + v, err := strconv.ParseUint(s, 0, 16) + if err == nil { + *f.v = (uint16)(v) + } + return err +} + +func (f *uint16Value) Get() interface{} { return (uint16)(*f.v) } + +func (f *uint16Value) String() string { return fmt.Sprintf("%v", *f.v) } + +// Uint16 parses the next command-line value as uint16. +func (p *Clause) Uint16() (target *uint16) { + target = new(uint16) + p.Uint16Var(target) + return +} + +func (p *Clause) Uint16Var(target *uint16) { + p.SetValue(newUint16Value(target)) +} + +// Uint16List accumulates uint16 values into a slice. +func (p *Clause) Uint16List(options ...AccumulatorOption) (target *[]uint16) { + target = new([]uint16) + p.Uint16ListVar(target, options...) + return +} + +func (p *Clause) Uint16ListVar(target *[]uint16, options ...AccumulatorOption) { + p.SetValue(newAccumulator(target, options, func(v interface{}) Value { + return newUint16Value(v.(*uint16)) + })) +} + +// -- uint32 Value +type uint32Value struct{ v *uint32 } + +func newUint32Value(p *uint32) *uint32Value { + return &uint32Value{p} +} + +func (f *uint32Value) Set(s string) error { + v, err := strconv.ParseUint(s, 0, 32) + if err == nil { + *f.v = (uint32)(v) + } + return err +} + +func (f *uint32Value) Get() interface{} { return (uint32)(*f.v) } + +func (f *uint32Value) String() string { return fmt.Sprintf("%v", *f.v) } + +// Uint32 parses the next command-line value as uint32. +func (p *Clause) Uint32() (target *uint32) { + target = new(uint32) + p.Uint32Var(target) + return +} + +func (p *Clause) Uint32Var(target *uint32) { + p.SetValue(newUint32Value(target)) +} + +// Uint32List accumulates uint32 values into a slice. +func (p *Clause) Uint32List(options ...AccumulatorOption) (target *[]uint32) { + target = new([]uint32) + p.Uint32ListVar(target, options...) 
+ return +} + +func (p *Clause) Uint32ListVar(target *[]uint32, options ...AccumulatorOption) { + p.SetValue(newAccumulator(target, options, func(v interface{}) Value { + return newUint32Value(v.(*uint32)) + })) +} + +// -- uint64 Value +type uint64Value struct{ v *uint64 } + +func newUint64Value(p *uint64) *uint64Value { + return &uint64Value{p} +} + +func (f *uint64Value) Set(s string) error { + v, err := strconv.ParseUint(s, 0, 64) + if err == nil { + *f.v = (uint64)(v) + } + return err +} + +func (f *uint64Value) Get() interface{} { return (uint64)(*f.v) } + +func (f *uint64Value) String() string { return fmt.Sprintf("%v", *f.v) } + +// Uint64 parses the next command-line value as uint64. +func (p *Clause) Uint64() (target *uint64) { + target = new(uint64) + p.Uint64Var(target) + return +} + +func (p *Clause) Uint64Var(target *uint64) { + p.SetValue(newUint64Value(target)) +} + +// Uint64List accumulates uint64 values into a slice. +func (p *Clause) Uint64List(options ...AccumulatorOption) (target *[]uint64) { + target = new([]uint64) + p.Uint64ListVar(target, options...) + return +} + +func (p *Clause) Uint64ListVar(target *[]uint64, options ...AccumulatorOption) { + p.SetValue(newAccumulator(target, options, func(v interface{}) Value { + return newUint64Value(v.(*uint64)) + })) +} + +// -- uint8 Value +type uint8Value struct{ v *uint8 } + +func newUint8Value(p *uint8) *uint8Value { + return &uint8Value{p} +} + +func (f *uint8Value) Set(s string) error { + v, err := strconv.ParseUint(s, 0, 8) + if err == nil { + *f.v = (uint8)(v) + } + return err +} + +func (f *uint8Value) Get() interface{} { return (uint8)(*f.v) } + +func (f *uint8Value) String() string { return fmt.Sprintf("%v", *f.v) } + +// Uint8 parses the next command-line value as uint8. +func (p *Clause) Uint8() (target *uint8) { + target = new(uint8) + p.Uint8Var(target) + return +} + +func (p *Clause) Uint8Var(target *uint8) { + p.SetValue(newUint8Value(target)) +} + +// Uint8List accumulates uint8 values into a slice. +func (p *Clause) Uint8List(options ...AccumulatorOption) (target *[]uint8) { + target = new([]uint8) + p.Uint8ListVar(target, options...) 
+ return +} + +func (p *Clause) Uint8ListVar(target *[]uint8, options ...AccumulatorOption) { + p.SetValue(newAccumulator(target, options, func(v interface{}) Value { + return newUint8Value(v.(*uint8)) + })) +} diff --git a/vendor/vendor.json b/vendor/vendor.json index 53ebded71f..24953ef02b 100644 --- a/vendor/vendor.json +++ b/vendor/vendor.json @@ -3,6 +3,21 @@ "ignore": "test", "package": [ { +<<<<<<< HEAD +======= + "checksumSHA1": "DYv6Q1+VfnUVxMwvk5IshAClLvw=", + "path": "github.com/Sirupsen/logrus", + "revision": "5e5dc898656f695e2a086b8e12559febbfc01562", + "revisionTime": "2017-05-15T10:45:16Z" + }, + { + "checksumSHA1": "XhkBA2yTAxMmcxk3dPqh8JNU4II=", + "path": "github.com/alecthomas/kingpin", + "revision": "75c60582c55399763d53eeb59078d63d3616f58c", + "revisionTime": "2017-12-17T17:58:06Z" + }, + { +>>>>>>> Switch cmd/amtool to kingpin "checksumSHA1": "4QnLdmB1kG3N+KlDd1N+G9TWAGQ=", "path": "github.com/beorn7/perks/quantile", "revision": "1e01b2bdbae8edb393fcf555732304f34d192fc9", From d98a05e76367a4cc128c1f559dd0a62e5e07eaf4 Mon Sep 17 00:00:00 2001 From: Calle Pettersson Date: Tue, 19 Dec 2017 21:26:47 +0100 Subject: [PATCH 2/8] Touch-ups --- cli/format/format.go | 13 ++++++++++--- cli/root.go | 19 +++++++++++-------- 2 files changed, 21 insertions(+), 11 deletions(-) diff --git a/cli/format/format.go b/cli/format/format.go index b180f66e22..2b277c6c67 100644 --- a/cli/format/format.go +++ b/cli/format/format.go @@ -4,6 +4,7 @@ import ( "io" "time" + "github.com/alecthomas/kingpin" "github.com/prometheus/alertmanager/config" "github.com/prometheus/alertmanager/dispatch" "github.com/prometheus/alertmanager/types" @@ -11,6 +12,14 @@ import ( const DefaultDateFormat = "2006-01-02 15:04:05 MST" +var ( + dateFormat *string +) + +func InitFormatFlags(app *kingpin.Application) { + dateFormat = app.Flag("date.format", "Format of date output").Default(DefaultDateFormat).String() +} + // Config representation // Need to get this moved to the prometheus/common/model repo having is duplicated here is smelly type Config struct { @@ -45,7 +54,5 @@ type Formatter interface { var Formatters = map[string]Formatter{} func FormatDate(input time.Time) string { - // FIX-BEFORE-MERGE - dateformat := DefaultDateFormat //viper.GetString("date.format") - return input.Format(dateformat) + return input.Format(*dateFormat) } diff --git a/cli/root.go b/cli/root.go index fa276e679f..9f2cad15aa 100644 --- a/cli/root.go +++ b/cli/root.go @@ -5,6 +5,7 @@ import ( "os" "github.com/alecthomas/kingpin" + "github.com/prometheus/alertmanager/cli/format" "github.com/prometheus/common/version" "gopkg.in/yaml.v2" ) @@ -58,15 +59,12 @@ func (r amtoolConfigResolver) Resolve(key string, context *kingpin.ParseContext) } /* -`View and modify the current Alertmanager state. +View and modify the current Alertmanager state. [Config File] -The alertmanager tool will read a config file from the --config cli argument, AMTOOL_CONFIG environment variable or -from one of two default config locations. Valid config file formats are JSON, TOML, YAML, HCL and Java Properties, use -whatever makes sense for your project. 
- -The default config file paths are $HOME/.config/amtool/config.yml or /etc/amtool/config.yml +The alertmanager tool will read a config file in YAML format from one of two default config locations: +$HOME/.config/amtool/config.yml or /etc/amtool/config.yml The accepted config options are as follows: @@ -81,11 +79,16 @@ The accepted config options are as follows: output Set a default output type. Options are (simple, extended, json) + + date.format + Sets the output format for dates. Defaults to 2006-01-02 15:04:05 MST */ -// Execute adds all child commands to the root command sets flags appropriately. -// This is called by main.main(). It only needs to happen once to the rootCmd. +// Execute parses the arguments and executes the corresponding command, it is +// called by cmd/amtool/main.main(). func Execute() { + format.InitFormatFlags(app) + app.Version(version.Print("amtool")) app.GetFlag("help").Short('h') From c4706f0065dfa6eb882eea40aa66166cccf8a07c Mon Sep 17 00:00:00 2001 From: Calle Pettersson Date: Tue, 19 Dec 2017 22:01:49 +0100 Subject: [PATCH 3/8] Implement long help --- cli/alert.go | 47 +++++++++++++------------ cli/check_config.go | 9 ++--- cli/config.go | 8 +++++ cli/root.go | 79 +++++++++++++++++++++++++++++++++++++------ cli/silence_add.go | 8 ++--- cli/silence_expire.go | 1 + cli/silence_import.go | 21 +++++------- cli/silence_query.go | 49 +++++++++++++-------------- cli/silence_update.go | 5 +-- 9 files changed, 140 insertions(+), 87 deletions(-) diff --git a/cli/alert.go b/cli/alert.go index 033a5e868f..f13d6c0d34 100644 --- a/cli/alert.go +++ b/cli/alert.go @@ -35,30 +35,6 @@ type alertBlock struct { Alerts []*dispatch.APIAlert `json:"alerts"` } -/* - View and search through current alerts. - - Amtool has a simplified prometheus query syntax, but contains robust support for - bash variable expansions. The non-option section of arguments constructs a list - of "Matcher Groups" that will be used to filter your query. The following - examples will attempt to show this behaviour in action: - - amtool alert query alertname=foo node=bar - - This query will match all alerts with the alertname=foo and node=bar label - value pairs set. - - amtool alert query foo node=bar - - If alertname is ommited and the first argument does not contain a '=' or a - '=~' then it will be assumed to be the value of the alertname pair. - - amtool alert query 'alertname=~foo.*' - - As well as direct equality, regex matching is also supported. The '=~' syntax - (similar to prometheus) is used to represent a regex match. Regex matching - can be used in combination with a direct match. -*/ var ( alertCmd = app.Command("alert", "View and search through current alerts") alertQueryCmd = alertCmd.Command("query", "View and search through current alerts").Default() @@ -69,6 +45,29 @@ var ( func init() { alertQueryCmd.Action(queryAlerts) + longHelpText["alert"] = `View and search through current alerts. + +Amtool has a simplified prometheus query syntax, but contains robust support for +bash variable expansions. The non-option section of arguments constructs a list +of "Matcher Groups" that will be used to filter your query. The following +examples will attempt to show this behaviour in action: + +amtool alert query alertname=foo node=bar + + This query will match all alerts with the alertname=foo and node=bar label + value pairs set. 
+
+amtool alert query foo node=bar
+
+	If alertname is omitted and the first argument does not contain a '=' or a
+	'=~' then it will be assumed to be the value of the alertname pair.
+
+amtool alert query 'alertname=~foo.*'
+
+	As well as direct equality, regex matching is also supported. The '=~' syntax
+	(similar to prometheus) is used to represent a regex match. Regex matching
+	can be used in combination with a direct match.`
+	longHelpText["alert query"] = longHelpText["alert"]
 }
 
 func fetchAlerts(filter string) ([]*dispatch.APIAlert, error) {
diff --git a/cli/check_config.go b/cli/check_config.go
index 8f44d4330f..27c869303d 100644
--- a/cli/check_config.go
+++ b/cli/check_config.go
@@ -16,15 +16,12 @@ var (
 
 func init() {
 	checkConfigCmd.Action(checkConfig)
-}
-
-/*
-Validate alertmanager config files
+	longHelpText["check-config"] = `Validate alertmanager config files
 
 Will validate the syntax and schema for alertmanager config file
 and associated templates. Non existing templates will not trigger
-errors
-*/
+errors`
+}
 
 func checkConfig(element *kingpin.ParseElement, ctx *kingpin.ParseContext) error {
 	return CheckConfig(*checkFiles)
diff --git a/cli/config.go b/cli/config.go
index 7114081a5f..1f676de362 100644
--- a/cli/config.go
+++ b/cli/config.go
@@ -44,6 +44,14 @@ type alertmanagerStatusResponse struct {
 // configCmd represents the config command
 var configCmd = app.Command("config", "View the running config").Action(queryConfig)
 
+func init() {
+	longHelpText["config"] = `View current config
+The amount of output is controlled by the output selection flag:
+	- Simple: Print just the running config
+	- Extended: Print the running config as well as uptime and all version info
+	- Json: Print entire config object as json`
+}
+
 func fetchConfig() (Config, error) {
 	configResponse := alertmanagerStatusResponse{}
 
diff --git a/cli/root.go b/cli/root.go
index 9f2cad15aa..d2814e21d6 100644
--- a/cli/root.go
+++ b/cli/root.go
@@ -15,6 +15,10 @@ var (
 	verbose         = app.Flag("verbose", "Verbose running information").Short('v').Bool()
 	alertmanagerUrl = app.Flag("alertmanager.url", "Alertmanager to talk to").Required().URL()
 	output          = app.Flag("output", "Output formatter (simple, extended, json)").Default("simple").Enum("simple", "extended", "json")
+
+	// This contains a mapping from command path to the long-format help string
+	// Separate subcommands with spaces, e.g. longHelpText["silence query"]
+	longHelpText = make(map[string]string)
 )
 
 type amtoolConfigResolver struct {
@@ -58,21 +62,23 @@ func (r amtoolConfigResolver) Resolve(key string, context *kingpin.ParseContext)
 	return nil, nil
 }
 
-/*
-View and modify the current Alertmanager state.
-
-[Config File]
+func init() {
+	longHelpText["root"] = `View and modify the current Alertmanager state.
 
-The alertmanager tool will read a config file in YAML format from one of two default config locations:
-$HOME/.config/amtool/config.yml or /etc/amtool/config.yml
+Config File:
+The alertmanager tool will read a config file in YAML format from one of two
+default config locations: $HOME/.config/amtool/config.yml or
+/etc/amtool/config.yml
 
-The accepted config options are as follows:
+All flags can be given in the config file, but the following are best suited for
+static configuration:
 
 	alertmanager.url
 		Set a default alertmanager url for each request
 
 	author
-		Set a default author value for new silences.
If this argument is not + specified then the username will be used comment_required Require a comment on silence creation @@ -81,8 +87,9 @@ The accepted config options are as follows: Set a default output type. Options are (simple, extended, json) date.format - Sets the output format for dates. Defaults to 2006-01-02 15:04:05 MST -*/ + Sets the output format for dates. Defaults to "2006-01-02 15:04:05 MST" +` +} // Execute parses the arguments and executes the corresponding command, it is // called by cmd/amtool/main.main(). @@ -91,6 +98,11 @@ func Execute() { app.Version(version.Print("amtool")) app.GetFlag("help").Short('h') + app.UsageTemplate(kingpin.CompactUsageTemplate) + app.Flag("long-help", "Give more detailed help output").UsageAction(&kingpin.UsageContext{ + Template: longHelpTemplate, + Vars: map[string]interface{}{"LongHelp": longHelpText}, + }).Bool() app.Resolver(newConfigResolver()) _, err := app.Parse(os.Args[1:]) @@ -98,3 +110,50 @@ func Execute() { kingpin.Fatalf("%v\n", err) } } + +const longHelpTemplate = `{{define "FormatCommands" -}} +{{range .FlattenedCommands -}} +{{if not .Hidden}} + {{.CmdSummary}} +{{.Help|Wrap 4}} +{{if .Flags -}} +{{with .Flags|FlagsToTwoColumns}}{{FormatTwoColumnsWithIndent . 4 2}}{{end}} +{{end -}} +{{end -}} +{{end -}} +{{end -}} + +{{define "FormatUsage" -}} +{{.AppSummary}} +{{if .Help}} +{{.Help|Wrap 0 -}} +{{end -}} + +{{end -}} + +{{if .Context.SelectedCommand -}} +{{T "usage:"}} {{.App.Name}} {{.App.FlagSummary}} {{.Context.SelectedCommand.CmdSummary}} + +{{index .LongHelp .Context.SelectedCommand.FullCommand}} +{{else}} +{{T "usage:"}} {{template "FormatUsage" .App}} +{{index .LongHelp "root"}} +{{end}} +{{if .Context.Flags -}} +{{T "Flags:"}} +{{.Context.Flags|FlagsToTwoColumns|FormatTwoColumns}} +{{end -}} +{{if .Context.Args -}} +{{T "Args:"}} +{{.Context.Args|ArgsToTwoColumns|FormatTwoColumns}} +{{end -}} +{{if .Context.SelectedCommand -}} +{{if len .Context.SelectedCommand.Commands -}} +{{T "Subcommands:"}} +{{template "FormatCommands" .Context.SelectedCommand}} +{{end -}} +{{else if .App.Commands -}} +{{T "Commands:" -}} +{{template "FormatCommands" .App}} +{{end -}} +` diff --git a/cli/silence_add.go b/cli/silence_add.go index 3a64948146..08a37630b8 100644 --- a/cli/silence_add.go +++ b/cli/silence_add.go @@ -42,10 +42,7 @@ var ( func init() { addCmd.Action(add) -} - -/* -Add a new alertmanager silence + longHelpText["silence add"] = `Add a new alertmanager silence Amtool uses a simplified prometheus syntax to represent silences. The non-option section of arguments constructs a list of "Matcher Groups" @@ -67,7 +64,8 @@ Add a new alertmanager silence As well as direct equality, regex matching is also supported. The '=~' syntax (similar to prometheus) is used to represent a regex match. Regex matching can be used in combination with a direct match. 
-*/ +` +} func add(element *kingpin.ParseElement, ctx *kingpin.ParseContext) error { var err error diff --git a/cli/silence_expire.go b/cli/silence_expire.go index 71ff90f173..ffb0e073e1 100644 --- a/cli/silence_expire.go +++ b/cli/silence_expire.go @@ -16,6 +16,7 @@ var ( func init() { expireCmd.Action(expire) + longHelpText["silence expire"] = `Expire an alertmanager silence` } func expire(element *kingpin.ParseElement, ctx *kingpin.ParseContext) error { diff --git a/cli/silence_import.go b/cli/silence_import.go index e0f66ce12c..4f906761c1 100644 --- a/cli/silence_import.go +++ b/cli/silence_import.go @@ -11,18 +11,6 @@ import ( "github.com/prometheus/alertmanager/types" ) -/* - Long: `Import alertmanager silences from JSON file or stdin - - This command can be used to bulk import silences from a JSON file - created by query command. For example: - - amtool silence query -o json foo > foo.json - amtool silence import foo.json - - JSON data can also come from stdin if no param is specified. -*/ - var ( importCmd = silenceCmd.Command("import", "Import silences") force = importCmd.Flag("force", "Force adding new silences even if it already exists").Short('f').Bool() @@ -32,6 +20,15 @@ var ( func init() { importCmd.Action(bulkImport) + longHelpText["silence import"] = `Import alertmanager silences from JSON file or stdin + +This command can be used to bulk import silences from a JSON file +created by query command. For example: + +amtool silence query -o json foo > foo.json +amtool silence import foo.json + +JSON data can also come from stdin if no param is specified.` } func addSilenceWorker(silencec <-chan *types.Silence, errc chan<- error) { diff --git a/cli/silence_query.go b/cli/silence_query.go index b52a39e303..a7f57aca77 100644 --- a/cli/silence_query.go +++ b/cli/silence_query.go @@ -24,42 +24,39 @@ var ( func init() { queryCmd.Action(query) -} - -/* - Query Alertmanager silences. + longHelpText["silence query"] = `Query Alertmanager silences. - Amtool has a simplified prometheus query syntax, but contains robust support for - bash variable expansions. The non-option section of arguments constructs a list - of "Matcher Groups" that will be used to filter your query. The following - examples will attempt to show this behaviour in action: +Amtool has a simplified prometheus query syntax, but contains robust support for +bash variable expansions. The non-option section of arguments constructs a list +of "Matcher Groups" that will be used to filter your query. The following +examples will attempt to show this behaviour in action: - amtool silence query alertname=foo node=bar +amtool silence query alertname=foo node=bar - This query will match all silences with the alertname=foo and node=bar label - value pairs set. + This query will match all silences with the alertname=foo and node=bar label + value pairs set. - amtool silence query foo node=bar +amtool silence query foo node=bar - If alertname is ommited and the first argument does not contain a '=' or a - '=~' then it will be assumed to be the value of the alertname pair. + If alertname is ommited and the first argument does not contain a '=' or a + '=~' then it will be assumed to be the value of the alertname pair. - amtool silence query 'alertname=~foo.*' +amtool silence query 'alertname=~foo.*' - As well as direct equality, regex matching is also supported. The '=~' syntax - (similar to prometheus) is used to represent a regex match. Regex matching - can be used in combination with a direct match. 
+ As well as direct equality, regex matching is also supported. The '=~' syntax + (similar to prometheus) is used to represent a regex match. Regex matching + can be used in combination with a direct match. - In addition to filtering by silence labels, one can also query for silences - that are due to expire soon with the "--within" parameter. In the event that - you want to preemptively act upon expiring silences by either fixing them or - extending them. For example: +In addition to filtering by silence labels, one can also query for silences +that are due to expire soon with the "--within" parameter. In the event that +you want to preemptively act upon expiring silences by either fixing them or +extending them. For example: - amtool silence query --within 8h +amtool silence query --within 8h - gives all the silences due to expire within the next 8 hours. This syntax can - also be combined with the label based filtering above for more flexibility. -*/ +gives all the silences due to expire within the next 8 hours. This syntax can +also be combined with the label based filtering above for more flexibility.` +} func fetchSilences(filter string) ([]types.Silence, error) { silenceResponse := alertmanagerSilenceResponse{} diff --git a/cli/silence_update.go b/cli/silence_update.go index 72e6130477..29caadf05e 100644 --- a/cli/silence_update.go +++ b/cli/silence_update.go @@ -29,12 +29,9 @@ var ( updateIds = updateCmd.Arg("update-ids", "Silence IDs to update").Strings() ) -/* -Extend or update existing silence in Alertmanager. -*/ - func init() { updateCmd.Action(update) + longHelpText["silence update"] = `Extend or update existing silence in Alertmanager.` } func update(element *kingpin.ParseElement, ctx *kingpin.ParseContext) error { From 3431d210696181a8d11acc5ec5a4877ee97bc97d Mon Sep 17 00:00:00 2001 From: Calle Pettersson Date: Tue, 19 Dec 2017 22:19:15 +0100 Subject: [PATCH 4/8] Add missing short-form of --output --- cli/root.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cli/root.go b/cli/root.go index d2814e21d6..f889c9c865 100644 --- a/cli/root.go +++ b/cli/root.go @@ -14,7 +14,7 @@ var ( app = kingpin.New("amtool", "Alertmanager CLI").DefaultEnvars() verbose = app.Flag("verbose", "Verbose running information").Short('v').Bool() alertmanagerUrl = app.Flag("alertmanager.url", "Alertmanager to talk to").Required().URL() - output = app.Flag("output", "Output formatter (simple, extended, json)").Default("simple").Enum("simple", "extended", "json") + output = app.Flag("output", "Output formatter (simple, extended, json)").Short('o').Default("simple").Enum("simple", "extended", "json") // This contains a mapping from command path to the long-format help string // Separate subcommands with spaces, eg longHelpText["silence query"] From e8190b9177d42095656c9a55cb38bc0618cb659c Mon Sep 17 00:00:00 2001 From: Calle Pettersson Date: Tue, 19 Dec 2017 22:41:40 +0100 Subject: [PATCH 5/8] Fix backwards compatibility for config file options --- cli/root.go | 24 +++++++++++++++++++++--- 1 file changed, 21 insertions(+), 3 deletions(-) diff --git a/cli/root.go b/cli/root.go index f889c9c865..d67b83d655 100644 --- a/cli/root.go +++ b/cli/root.go @@ -62,6 +62,16 @@ func (r amtoolConfigResolver) Resolve(key string, context *kingpin.ParseContext) return nil, nil } +// This function maps things which have previously had different names in the +// config file to their new names, so old configrations keep working +func backwardsCompatibilityResolver(key string) string { + switch key { + 
case "require-comment": + return "comment_required" + } + return key +} + func init() { longHelpText["root"] = `View and modify the current Alertmanager state. @@ -80,8 +90,8 @@ static configuration: Set a default author value for new silences. If this argument is not specified then the username will be used - comment_required - Require a comment on silence creation + require-comment + Bool, whether to require a comment on silence creation. Defaults to true output Set a default output type. Options are (simple, extended, json) @@ -104,7 +114,15 @@ func Execute() { Vars: map[string]interface{}{"LongHelp": longHelpText}, }).Bool() - app.Resolver(newConfigResolver()) + configResolver := newConfigResolver() + // Use the same resolver twice, first for checking backwards compatibility, + // then again for the new names. This order ensures that the newest wins, if + // both old and new are present + app.Resolver( + kingpin.RenamingResolver(configResolver, backwardsCompatibilityResolver), + configResolver, + ) + _, err := app.Parse(os.Args[1:]) if err != nil { kingpin.Fatalf("%v\n", err) From 09a2e7444ef6b8f961cd3c502bb61ce2a3175c34 Mon Sep 17 00:00:00 2001 From: Calle Pettersson Date: Tue, 19 Dec 2017 23:02:59 +0100 Subject: [PATCH 6/8] Fix vendoring --- .../LICENSE.md => alecthomas/units/COPYING} | 16 +- vendor/github.com/alecthomas/units/README.md | 11 + vendor/github.com/alecthomas/units/bytes.go | 83 + vendor/github.com/alecthomas/units/doc.go | 13 + vendor/github.com/alecthomas/units/si.go | 26 + vendor/github.com/alecthomas/units/util.go | 138 + .../cpuguy83/go-md2man/md2man/md2man.go | 19 - .../cpuguy83/go-md2man/md2man/roff.go | 282 - vendor/github.com/fsnotify/fsnotify/AUTHORS | 46 - .../github.com/fsnotify/fsnotify/CHANGELOG.md | 307 - .../fsnotify/fsnotify/CONTRIBUTING.md | 77 - vendor/github.com/fsnotify/fsnotify/LICENSE | 28 - vendor/github.com/fsnotify/fsnotify/README.md | 79 - vendor/github.com/fsnotify/fsnotify/fen.go | 37 - .../github.com/fsnotify/fsnotify/fsnotify.go | 66 - .../github.com/fsnotify/fsnotify/inotify.go | 334 - .../fsnotify/fsnotify/inotify_poller.go | 187 - vendor/github.com/fsnotify/fsnotify/kqueue.go | 503 -- .../fsnotify/fsnotify/open_mode_bsd.go | 11 - .../fsnotify/fsnotify/open_mode_darwin.go | 12 - .../github.com/fsnotify/fsnotify/windows.go | 561 -- vendor/github.com/hashicorp/hcl/LICENSE | 354 - vendor/github.com/hashicorp/hcl/Makefile | 18 - vendor/github.com/hashicorp/hcl/README.md | 125 - vendor/github.com/hashicorp/hcl/decoder.go | 724 -- vendor/github.com/hashicorp/hcl/hcl.go | 11 - .../github.com/hashicorp/hcl/hcl/ast/ast.go | 219 - .../github.com/hashicorp/hcl/hcl/ast/walk.go | 52 - .../hashicorp/hcl/hcl/parser/error.go | 17 - .../hashicorp/hcl/hcl/parser/parser.go | 514 -- .../hashicorp/hcl/hcl/scanner/scanner.go | 651 -- .../hashicorp/hcl/hcl/strconv/quote.go | 241 - .../hashicorp/hcl/hcl/token/position.go | 46 - .../hashicorp/hcl/hcl/token/token.go | 219 - .../hashicorp/hcl/json/parser/flatten.go | 117 - .../hashicorp/hcl/json/parser/parser.go | 313 - .../hashicorp/hcl/json/scanner/scanner.go | 451 - .../hashicorp/hcl/json/token/position.go | 46 - .../hashicorp/hcl/json/token/token.go | 118 - vendor/github.com/hashicorp/hcl/lex.go | 38 - vendor/github.com/hashicorp/hcl/parse.go | 39 - .../inconshreveable/mousetrap/LICENSE | 13 - .../inconshreveable/mousetrap/README.md | 23 - .../inconshreveable/mousetrap/trap_others.go | 15 - .../inconshreveable/mousetrap/trap_windows.go | 98 - 
.../mousetrap/trap_windows_1.4.go | 46 - .../magiconair/properties/CHANGELOG.md | 90 - .../github.com/magiconair/properties/LICENSE | 25 - .../magiconair/properties/README.md | 81 - .../magiconair/properties/decode.go | 289 - .../github.com/magiconair/properties/doc.go | 156 - .../magiconair/properties/integrate.go | 34 - .../github.com/magiconair/properties/lex.go | 408 - .../github.com/magiconair/properties/load.go | 232 - .../magiconair/properties/parser.go | 95 - .../magiconair/properties/properties.go | 777 -- .../magiconair/properties/rangecheck.go | 31 - .../mitchellh/mapstructure/README.md | 46 - .../mitchellh/mapstructure/decode_hooks.go | 154 - .../mitchellh/mapstructure/error.go | 50 - .../mitchellh/mapstructure/mapstructure.go | 809 -- .../go-i18n}/LICENSE | 4 +- .../nicksnyder/go-i18n/i18n/bundle/bundle.go | 444 + .../nicksnyder/go-i18n/i18n/i18n.go | 158 + .../go-i18n/i18n/language/language.go | 99 + .../go-i18n/i18n/language/operands.go | 119 + .../go-i18n/i18n/language/plural.go | 40 + .../go-i18n/i18n/language/pluralspec.go | 75 + .../go-i18n/i18n/language/pluralspec_gen.go | 557 ++ .../i18n/translation/plural_translation.go | 82 + .../i18n/translation/single_translation.go | 61 + .../go-i18n/i18n/translation/template.go | 65 + .../go-i18n/i18n/translation/translation.go | 84 + .../russross/blackfriday/LICENSE.txt | 29 - .../github.com/russross/blackfriday/README.md | 276 - .../github.com/russross/blackfriday/block.go | 1430 --- .../github.com/russross/blackfriday/html.go | 949 -- .../github.com/russross/blackfriday/inline.go | 1148 --- .../github.com/russross/blackfriday/latex.go | 332 - .../russross/blackfriday/markdown.go | 926 -- .../russross/blackfriday/smartypants.go | 400 - .../shurcooL/sanitized_anchor_name/LICENSE | 19 - .../shurcooL/sanitized_anchor_name/README.md | 34 - .../shurcooL/sanitized_anchor_name/main.go | 29 - vendor/github.com/spf13/afero/LICENSE.txt | 174 - vendor/github.com/spf13/afero/README.md | 449 - vendor/github.com/spf13/afero/afero.go | 108 - vendor/github.com/spf13/afero/basepath.go | 145 - .../github.com/spf13/afero/cacheOnReadFs.go | 295 - vendor/github.com/spf13/afero/const_bsds.go | 22 - .../github.com/spf13/afero/const_win_unix.go | 25 - .../github.com/spf13/afero/copyOnWriteFs.go | 253 - vendor/github.com/spf13/afero/httpFs.go | 110 - vendor/github.com/spf13/afero/ioutil.go | 230 - vendor/github.com/spf13/afero/mem/dir.go | 37 - vendor/github.com/spf13/afero/mem/dirmap.go | 43 - vendor/github.com/spf13/afero/mem/file.go | 285 - vendor/github.com/spf13/afero/memmap.go | 361 - vendor/github.com/spf13/afero/memradix.go | 14 - vendor/github.com/spf13/afero/os.go | 94 - vendor/github.com/spf13/afero/path.go | 108 - vendor/github.com/spf13/afero/readonlyfs.go | 70 - vendor/github.com/spf13/afero/regexpfs.go | 214 - vendor/github.com/spf13/afero/unionFile.go | 274 - vendor/github.com/spf13/afero/util.go | 331 - vendor/github.com/spf13/cast/LICENSE | 21 - vendor/github.com/spf13/cast/README.md | 72 - vendor/github.com/spf13/cast/cast.go | 83 - vendor/github.com/spf13/cast/caste.go | 539 -- vendor/github.com/spf13/cobra/LICENSE.txt | 174 - vendor/github.com/spf13/cobra/README.md | 942 -- vendor/github.com/spf13/cobra/args.go | 98 - .../spf13/cobra/bash_completions.go | 537 -- .../spf13/cobra/bash_completions.md | 206 - vendor/github.com/spf13/cobra/cobra.go | 190 - vendor/github.com/spf13/cobra/command.go | 1306 --- 
.../github.com/spf13/cobra/command_notwin.go | 5 - vendor/github.com/spf13/cobra/command_win.go | 20 - vendor/github.com/spf13/cobra/doc/man_docs.go | 236 - vendor/github.com/spf13/cobra/doc/man_docs.md | 31 - vendor/github.com/spf13/cobra/doc/md_docs.go | 159 - vendor/github.com/spf13/cobra/doc/md_docs.md | 115 - vendor/github.com/spf13/cobra/doc/util.go | 51 - .../github.com/spf13/cobra/doc/yaml_docs.go | 169 - .../github.com/spf13/cobra/doc/yaml_docs.md | 112 - .../github.com/spf13/cobra/zsh_completions.go | 114 - .../spf13/jwalterweatherman/LICENSE | 21 - .../spf13/jwalterweatherman/README.md | 148 - .../jwalterweatherman/default_notepad.go | 113 - .../spf13/jwalterweatherman/log_counter.go | 56 - .../spf13/jwalterweatherman/notepad.go | 195 - vendor/github.com/spf13/pflag/LICENSE | 28 - vendor/github.com/spf13/pflag/README.md | 296 - vendor/github.com/spf13/pflag/bool.go | 94 - vendor/github.com/spf13/pflag/bool_slice.go | 147 - vendor/github.com/spf13/pflag/count.go | 96 - vendor/github.com/spf13/pflag/duration.go | 86 - vendor/github.com/spf13/pflag/flag.go | 1132 --- vendor/github.com/spf13/pflag/float32.go | 88 - vendor/github.com/spf13/pflag/float64.go | 84 - vendor/github.com/spf13/pflag/golangflag.go | 101 - vendor/github.com/spf13/pflag/int.go | 84 - vendor/github.com/spf13/pflag/int32.go | 88 - vendor/github.com/spf13/pflag/int64.go | 84 - vendor/github.com/spf13/pflag/int8.go | 88 - vendor/github.com/spf13/pflag/int_slice.go | 128 - vendor/github.com/spf13/pflag/ip.go | 94 - vendor/github.com/spf13/pflag/ip_slice.go | 148 - vendor/github.com/spf13/pflag/ipmask.go | 122 - vendor/github.com/spf13/pflag/ipnet.go | 98 - vendor/github.com/spf13/pflag/string.go | 80 - vendor/github.com/spf13/pflag/string_array.go | 103 - vendor/github.com/spf13/pflag/string_slice.go | 129 - vendor/github.com/spf13/pflag/uint.go | 88 - vendor/github.com/spf13/pflag/uint16.go | 88 - vendor/github.com/spf13/pflag/uint32.go | 88 - vendor/github.com/spf13/pflag/uint64.go | 88 - vendor/github.com/spf13/pflag/uint8.go | 88 - vendor/github.com/spf13/pflag/uint_slice.go | 126 - vendor/github.com/spf13/viper/LICENSE | 21 - vendor/github.com/spf13/viper/README.md | 621 -- vendor/github.com/spf13/viper/flags.go | 57 - vendor/github.com/spf13/viper/nohup.out | 1 - vendor/github.com/spf13/viper/util.go | 282 - vendor/github.com/spf13/viper/viper.go | 1517 ---- vendor/golang.org/x/sys/LICENSE | 27 - vendor/golang.org/x/sys/PATENTS | 22 - vendor/golang.org/x/sys/unix/README.md | 173 - vendor/golang.org/x/sys/unix/asm_darwin_386.s | 29 - .../golang.org/x/sys/unix/asm_darwin_amd64.s | 29 - vendor/golang.org/x/sys/unix/asm_darwin_arm.s | 30 - .../golang.org/x/sys/unix/asm_darwin_arm64.s | 30 - .../x/sys/unix/asm_dragonfly_amd64.s | 29 - .../golang.org/x/sys/unix/asm_freebsd_386.s | 29 - .../golang.org/x/sys/unix/asm_freebsd_amd64.s | 29 - .../golang.org/x/sys/unix/asm_freebsd_arm.s | 29 - vendor/golang.org/x/sys/unix/asm_linux_386.s | 35 - .../golang.org/x/sys/unix/asm_linux_amd64.s | 29 - vendor/golang.org/x/sys/unix/asm_linux_arm.s | 29 - .../golang.org/x/sys/unix/asm_linux_arm64.s | 24 - .../golang.org/x/sys/unix/asm_linux_mips64x.s | 28 - .../golang.org/x/sys/unix/asm_linux_mipsx.s | 31 - .../golang.org/x/sys/unix/asm_linux_ppc64x.s | 28 - .../golang.org/x/sys/unix/asm_linux_s390x.s | 28 - vendor/golang.org/x/sys/unix/asm_netbsd_386.s | 29 - 
.../golang.org/x/sys/unix/asm_netbsd_amd64.s | 29 - vendor/golang.org/x/sys/unix/asm_netbsd_arm.s | 29 - .../golang.org/x/sys/unix/asm_openbsd_386.s | 29 - .../golang.org/x/sys/unix/asm_openbsd_amd64.s | 29 - .../golang.org/x/sys/unix/asm_openbsd_arm.s | 29 - .../golang.org/x/sys/unix/asm_solaris_amd64.s | 17 - .../golang.org/x/sys/unix/bluetooth_linux.go | 35 - vendor/golang.org/x/sys/unix/cap_freebsd.go | 195 - vendor/golang.org/x/sys/unix/constants.go | 13 - vendor/golang.org/x/sys/unix/dev_darwin.go | 24 - vendor/golang.org/x/sys/unix/dev_dragonfly.go | 30 - vendor/golang.org/x/sys/unix/dev_freebsd.go | 30 - vendor/golang.org/x/sys/unix/dev_linux.go | 42 - vendor/golang.org/x/sys/unix/dev_netbsd.go | 29 - vendor/golang.org/x/sys/unix/dev_openbsd.go | 29 - vendor/golang.org/x/sys/unix/dirent.go | 102 - vendor/golang.org/x/sys/unix/endian_big.go | 9 - vendor/golang.org/x/sys/unix/endian_little.go | 9 - vendor/golang.org/x/sys/unix/env_unix.go | 27 - vendor/golang.org/x/sys/unix/env_unset.go | 14 - .../x/sys/unix/errors_freebsd_386.go | 227 - .../x/sys/unix/errors_freebsd_amd64.go | 227 - .../x/sys/unix/errors_freebsd_arm.go | 226 - vendor/golang.org/x/sys/unix/file_unix.go | 27 - vendor/golang.org/x/sys/unix/flock.go | 22 - .../x/sys/unix/flock_linux_32bit.go | 13 - vendor/golang.org/x/sys/unix/gccgo.go | 46 - vendor/golang.org/x/sys/unix/gccgo_c.c | 41 - .../x/sys/unix/gccgo_linux_amd64.go | 20 - vendor/golang.org/x/sys/unix/mkall.sh | 194 - vendor/golang.org/x/sys/unix/mkerrors.sh | 566 -- vendor/golang.org/x/sys/unix/mkpost.go | 88 - vendor/golang.org/x/sys/unix/mksyscall.pl | 328 - .../x/sys/unix/mksyscall_solaris.pl | 289 - .../golang.org/x/sys/unix/mksysctl_openbsd.pl | 264 - .../golang.org/x/sys/unix/mksysnum_darwin.pl | 39 - .../x/sys/unix/mksysnum_dragonfly.pl | 50 - .../golang.org/x/sys/unix/mksysnum_freebsd.pl | 50 - .../golang.org/x/sys/unix/mksysnum_netbsd.pl | 58 - .../golang.org/x/sys/unix/mksysnum_openbsd.pl | 50 - .../golang.org/x/sys/unix/openbsd_pledge.go | 38 - vendor/golang.org/x/sys/unix/pagesize_unix.go | 15 - vendor/golang.org/x/sys/unix/race.go | 30 - vendor/golang.org/x/sys/unix/race0.go | 25 - .../golang.org/x/sys/unix/sockcmsg_linux.go | 36 - vendor/golang.org/x/sys/unix/sockcmsg_unix.go | 104 - vendor/golang.org/x/sys/unix/str.go | 26 - vendor/golang.org/x/sys/unix/syscall.go | 69 - vendor/golang.org/x/sys/unix/syscall_bsd.go | 635 -- .../golang.org/x/sys/unix/syscall_darwin.go | 536 -- .../x/sys/unix/syscall_darwin_386.go | 75 - .../x/sys/unix/syscall_darwin_amd64.go | 75 - .../x/sys/unix/syscall_darwin_arm.go | 69 - .../x/sys/unix/syscall_darwin_arm64.go | 75 - .../x/sys/unix/syscall_dragonfly.go | 415 - .../x/sys/unix/syscall_dragonfly_amd64.go | 59 - .../golang.org/x/sys/unix/syscall_freebsd.go | 708 -- .../x/sys/unix/syscall_freebsd_386.go | 59 - .../x/sys/unix/syscall_freebsd_amd64.go | 59 - .../x/sys/unix/syscall_freebsd_arm.go | 59 - vendor/golang.org/x/sys/unix/syscall_linux.go | 1470 ---- .../x/sys/unix/syscall_linux_386.go | 397 - .../x/sys/unix/syscall_linux_amd64.go | 150 - .../x/sys/unix/syscall_linux_amd64_gc.go | 13 - .../x/sys/unix/syscall_linux_arm.go | 261 - .../x/sys/unix/syscall_linux_arm64.go | 193 - .../x/sys/unix/syscall_linux_mips64x.go | 212 - .../x/sys/unix/syscall_linux_mipsx.go | 237 - .../x/sys/unix/syscall_linux_ppc64x.go | 133 - .../x/sys/unix/syscall_linux_s390x.go | 326 - .../x/sys/unix/syscall_linux_sparc64.go | 149 - .../golang.org/x/sys/unix/syscall_netbsd.go | 472 - .../x/sys/unix/syscall_netbsd_386.go | 40 - 
.../x/sys/unix/syscall_netbsd_amd64.go | 40 - .../x/sys/unix/syscall_netbsd_arm.go | 40 - .../golang.org/x/sys/unix/syscall_no_getwd.go | 11 - .../golang.org/x/sys/unix/syscall_openbsd.go | 282 - .../x/sys/unix/syscall_openbsd_386.go | 40 - .../x/sys/unix/syscall_openbsd_amd64.go | 40 - .../x/sys/unix/syscall_openbsd_arm.go | 40 - .../golang.org/x/sys/unix/syscall_solaris.go | 719 -- .../x/sys/unix/syscall_solaris_amd64.go | 35 - vendor/golang.org/x/sys/unix/syscall_unix.go | 293 - .../golang.org/x/sys/unix/syscall_unix_gc.go | 15 - vendor/golang.org/x/sys/unix/types_darwin.go | 254 - .../golang.org/x/sys/unix/types_dragonfly.go | 249 - vendor/golang.org/x/sys/unix/types_freebsd.go | 372 - vendor/golang.org/x/sys/unix/types_netbsd.go | 239 - vendor/golang.org/x/sys/unix/types_openbsd.go | 251 - vendor/golang.org/x/sys/unix/types_solaris.go | 265 - .../x/sys/unix/zerrors_darwin_386.go | 1673 ---- .../x/sys/unix/zerrors_darwin_amd64.go | 1673 ---- .../x/sys/unix/zerrors_darwin_arm.go | 1673 ---- .../x/sys/unix/zerrors_darwin_arm64.go | 1673 ---- .../x/sys/unix/zerrors_dragonfly_amd64.go | 1568 ---- .../x/sys/unix/zerrors_freebsd_386.go | 1749 ---- .../x/sys/unix/zerrors_freebsd_amd64.go | 1750 ---- .../x/sys/unix/zerrors_freebsd_arm.go | 1758 ---- .../x/sys/unix/zerrors_linux_386.go | 2231 ----- .../x/sys/unix/zerrors_linux_amd64.go | 2232 ----- .../x/sys/unix/zerrors_linux_arm.go | 2236 ----- .../x/sys/unix/zerrors_linux_arm64.go | 2222 ----- .../x/sys/unix/zerrors_linux_mips.go | 2241 ----- .../x/sys/unix/zerrors_linux_mips64.go | 2241 ----- .../x/sys/unix/zerrors_linux_mips64le.go | 2241 ----- .../x/sys/unix/zerrors_linux_mipsle.go | 2241 ----- .../x/sys/unix/zerrors_linux_ppc64.go | 2294 ----- .../x/sys/unix/zerrors_linux_ppc64le.go | 2294 ----- .../x/sys/unix/zerrors_linux_s390x.go | 2293 ----- .../x/sys/unix/zerrors_linux_sparc64.go | 2142 ----- .../x/sys/unix/zerrors_netbsd_386.go | 1712 ---- .../x/sys/unix/zerrors_netbsd_amd64.go | 1702 ---- .../x/sys/unix/zerrors_netbsd_arm.go | 1691 ---- .../x/sys/unix/zerrors_openbsd_386.go | 1584 ---- .../x/sys/unix/zerrors_openbsd_amd64.go | 1583 ---- .../x/sys/unix/zerrors_openbsd_arm.go | 1586 ---- .../x/sys/unix/zerrors_solaris_amd64.go | 1489 ---- .../x/sys/unix/zsyscall_darwin_386.go | 1609 ---- .../x/sys/unix/zsyscall_darwin_amd64.go | 1609 ---- .../x/sys/unix/zsyscall_darwin_arm.go | 1609 ---- .../x/sys/unix/zsyscall_darwin_arm64.go | 1609 ---- .../x/sys/unix/zsyscall_dragonfly_amd64.go | 1440 ---- .../x/sys/unix/zsyscall_freebsd_386.go | 1877 ---- .../x/sys/unix/zsyscall_freebsd_amd64.go | 1877 ---- .../x/sys/unix/zsyscall_freebsd_arm.go | 1877 ---- .../x/sys/unix/zsyscall_linux_386.go | 1964 ----- .../x/sys/unix/zsyscall_linux_amd64.go | 2157 ----- .../x/sys/unix/zsyscall_linux_arm.go | 2066 ----- .../x/sys/unix/zsyscall_linux_arm64.go | 2029 ----- .../x/sys/unix/zsyscall_linux_mips.go | 2122 ----- .../x/sys/unix/zsyscall_linux_mips64.go | 2105 ----- .../x/sys/unix/zsyscall_linux_mips64le.go | 2105 ----- .../x/sys/unix/zsyscall_linux_mipsle.go | 2122 ----- .../x/sys/unix/zsyscall_linux_ppc64.go | 2168 ----- .../x/sys/unix/zsyscall_linux_ppc64le.go | 2168 ----- .../x/sys/unix/zsyscall_linux_s390x.go | 1948 ----- .../x/sys/unix/zsyscall_linux_sparc64.go | 1833 ---- .../x/sys/unix/zsyscall_netbsd_386.go | 1346 --- .../x/sys/unix/zsyscall_netbsd_amd64.go | 1346 --- .../x/sys/unix/zsyscall_netbsd_arm.go | 1346 --- .../x/sys/unix/zsyscall_openbsd_386.go | 1404 --- .../x/sys/unix/zsyscall_openbsd_amd64.go | 1404 --- 
.../x/sys/unix/zsyscall_openbsd_arm.go | 1404 --- .../x/sys/unix/zsyscall_solaris_amd64.go | 1630 ---- .../x/sys/unix/zsysctl_openbsd_386.go | 270 - .../x/sys/unix/zsysctl_openbsd_amd64.go | 270 - .../x/sys/unix/zsysctl_openbsd_arm.go | 270 - .../x/sys/unix/zsysnum_darwin_386.go | 398 - .../x/sys/unix/zsysnum_darwin_amd64.go | 398 - .../x/sys/unix/zsysnum_darwin_arm.go | 426 - .../x/sys/unix/zsysnum_darwin_arm64.go | 426 - .../x/sys/unix/zsysnum_dragonfly_amd64.go | 315 - .../x/sys/unix/zsysnum_freebsd_386.go | 353 - .../x/sys/unix/zsysnum_freebsd_amd64.go | 353 - .../x/sys/unix/zsysnum_freebsd_arm.go | 353 - .../x/sys/unix/zsysnum_linux_386.go | 390 - .../x/sys/unix/zsysnum_linux_amd64.go | 342 - .../x/sys/unix/zsysnum_linux_arm.go | 362 - .../x/sys/unix/zsysnum_linux_arm64.go | 286 - .../x/sys/unix/zsysnum_linux_mips.go | 375 - .../x/sys/unix/zsysnum_linux_mips64.go | 335 - .../x/sys/unix/zsysnum_linux_mips64le.go | 335 - .../x/sys/unix/zsysnum_linux_mipsle.go | 375 - .../x/sys/unix/zsysnum_linux_ppc64.go | 370 - .../x/sys/unix/zsysnum_linux_ppc64le.go | 370 - .../x/sys/unix/zsysnum_linux_s390x.go | 333 - .../x/sys/unix/zsysnum_linux_sparc64.go | 348 - .../x/sys/unix/zsysnum_netbsd_386.go | 274 - .../x/sys/unix/zsysnum_netbsd_amd64.go | 274 - .../x/sys/unix/zsysnum_netbsd_arm.go | 274 - .../x/sys/unix/zsysnum_openbsd_386.go | 207 - .../x/sys/unix/zsysnum_openbsd_amd64.go | 207 - .../x/sys/unix/zsysnum_openbsd_arm.go | 213 - .../x/sys/unix/zsysnum_solaris_amd64.go | 13 - .../x/sys/unix/ztypes_darwin_386.go | 462 - .../x/sys/unix/ztypes_darwin_amd64.go | 472 - .../x/sys/unix/ztypes_darwin_arm.go | 463 - .../x/sys/unix/ztypes_darwin_arm64.go | 471 - .../x/sys/unix/ztypes_dragonfly_amd64.go | 448 - .../x/sys/unix/ztypes_freebsd_386.go | 521 -- .../x/sys/unix/ztypes_freebsd_amd64.go | 524 -- .../x/sys/unix/ztypes_freebsd_arm.go | 524 -- .../golang.org/x/sys/unix/ztypes_linux_386.go | 793 -- .../x/sys/unix/ztypes_linux_amd64.go | 811 -- .../golang.org/x/sys/unix/ztypes_linux_arm.go | 782 -- .../x/sys/unix/ztypes_linux_arm64.go | 790 -- .../x/sys/unix/ztypes_linux_mips.go | 787 -- .../x/sys/unix/ztypes_linux_mips64.go | 792 -- .../x/sys/unix/ztypes_linux_mips64le.go | 792 -- .../x/sys/unix/ztypes_linux_mipsle.go | 787 -- .../x/sys/unix/ztypes_linux_ppc64.go | 800 -- .../x/sys/unix/ztypes_linux_ppc64le.go | 800 -- .../x/sys/unix/ztypes_linux_s390x.go | 817 -- .../x/sys/unix/ztypes_linux_sparc64.go | 666 -- .../x/sys/unix/ztypes_netbsd_386.go | 401 - .../x/sys/unix/ztypes_netbsd_amd64.go | 408 - .../x/sys/unix/ztypes_netbsd_arm.go | 406 - .../x/sys/unix/ztypes_openbsd_386.go | 446 - .../x/sys/unix/ztypes_openbsd_amd64.go | 453 - .../x/sys/unix/ztypes_openbsd_arm.go | 439 - .../x/sys/unix/ztypes_solaris_amd64.go | 440 - vendor/golang.org/x/text/LICENSE | 27 - vendor/golang.org/x/text/PATENTS | 22 - .../golang.org/x/text/transform/transform.go | 705 -- .../x/text/unicode/norm/composition.go | 508 -- .../x/text/unicode/norm/forminfo.go | 259 - .../golang.org/x/text/unicode/norm/input.go | 109 - vendor/golang.org/x/text/unicode/norm/iter.go | 457 - .../x/text/unicode/norm/maketables.go | 976 --- .../x/text/unicode/norm/normalize.go | 609 -- .../x/text/unicode/norm/readwriter.go | 125 - .../golang.org/x/text/unicode/norm/tables.go | 7651 ----------------- .../x/text/unicode/norm/transform.go | 88 - vendor/golang.org/x/text/unicode/norm/trie.go | 54 - .../golang.org/x/text/unicode/norm/triegen.go | 117 - vendor/vendor.json | 197 +- 401 files changed, 2091 insertions(+), 184065 deletions(-) rename 
vendor/github.com/{cpuguy83/go-md2man/LICENSE.md => alecthomas/units/COPYING} (60%) create mode 100644 vendor/github.com/alecthomas/units/README.md create mode 100644 vendor/github.com/alecthomas/units/bytes.go create mode 100644 vendor/github.com/alecthomas/units/doc.go create mode 100644 vendor/github.com/alecthomas/units/si.go create mode 100644 vendor/github.com/alecthomas/units/util.go delete mode 100644 vendor/github.com/cpuguy83/go-md2man/md2man/md2man.go delete mode 100644 vendor/github.com/cpuguy83/go-md2man/md2man/roff.go delete mode 100644 vendor/github.com/fsnotify/fsnotify/AUTHORS delete mode 100644 vendor/github.com/fsnotify/fsnotify/CHANGELOG.md delete mode 100644 vendor/github.com/fsnotify/fsnotify/CONTRIBUTING.md delete mode 100644 vendor/github.com/fsnotify/fsnotify/LICENSE delete mode 100644 vendor/github.com/fsnotify/fsnotify/README.md delete mode 100644 vendor/github.com/fsnotify/fsnotify/fen.go delete mode 100644 vendor/github.com/fsnotify/fsnotify/fsnotify.go delete mode 100644 vendor/github.com/fsnotify/fsnotify/inotify.go delete mode 100644 vendor/github.com/fsnotify/fsnotify/inotify_poller.go delete mode 100644 vendor/github.com/fsnotify/fsnotify/kqueue.go delete mode 100644 vendor/github.com/fsnotify/fsnotify/open_mode_bsd.go delete mode 100644 vendor/github.com/fsnotify/fsnotify/open_mode_darwin.go delete mode 100644 vendor/github.com/fsnotify/fsnotify/windows.go delete mode 100644 vendor/github.com/hashicorp/hcl/LICENSE delete mode 100644 vendor/github.com/hashicorp/hcl/Makefile delete mode 100644 vendor/github.com/hashicorp/hcl/README.md delete mode 100644 vendor/github.com/hashicorp/hcl/decoder.go delete mode 100644 vendor/github.com/hashicorp/hcl/hcl.go delete mode 100644 vendor/github.com/hashicorp/hcl/hcl/ast/ast.go delete mode 100644 vendor/github.com/hashicorp/hcl/hcl/ast/walk.go delete mode 100644 vendor/github.com/hashicorp/hcl/hcl/parser/error.go delete mode 100644 vendor/github.com/hashicorp/hcl/hcl/parser/parser.go delete mode 100644 vendor/github.com/hashicorp/hcl/hcl/scanner/scanner.go delete mode 100644 vendor/github.com/hashicorp/hcl/hcl/strconv/quote.go delete mode 100644 vendor/github.com/hashicorp/hcl/hcl/token/position.go delete mode 100644 vendor/github.com/hashicorp/hcl/hcl/token/token.go delete mode 100644 vendor/github.com/hashicorp/hcl/json/parser/flatten.go delete mode 100644 vendor/github.com/hashicorp/hcl/json/parser/parser.go delete mode 100644 vendor/github.com/hashicorp/hcl/json/scanner/scanner.go delete mode 100644 vendor/github.com/hashicorp/hcl/json/token/position.go delete mode 100644 vendor/github.com/hashicorp/hcl/json/token/token.go delete mode 100644 vendor/github.com/hashicorp/hcl/lex.go delete mode 100644 vendor/github.com/hashicorp/hcl/parse.go delete mode 100644 vendor/github.com/inconshreveable/mousetrap/LICENSE delete mode 100644 vendor/github.com/inconshreveable/mousetrap/README.md delete mode 100644 vendor/github.com/inconshreveable/mousetrap/trap_others.go delete mode 100644 vendor/github.com/inconshreveable/mousetrap/trap_windows.go delete mode 100644 vendor/github.com/inconshreveable/mousetrap/trap_windows_1.4.go delete mode 100644 vendor/github.com/magiconair/properties/CHANGELOG.md delete mode 100644 vendor/github.com/magiconair/properties/LICENSE delete mode 100644 vendor/github.com/magiconair/properties/README.md delete mode 100644 
vendor/github.com/magiconair/properties/decode.go delete mode 100644 vendor/github.com/magiconair/properties/doc.go delete mode 100644 vendor/github.com/magiconair/properties/integrate.go delete mode 100644 vendor/github.com/magiconair/properties/lex.go delete mode 100644 vendor/github.com/magiconair/properties/load.go delete mode 100644 vendor/github.com/magiconair/properties/parser.go delete mode 100644 vendor/github.com/magiconair/properties/properties.go delete mode 100644 vendor/github.com/magiconair/properties/rangecheck.go delete mode 100644 vendor/github.com/mitchellh/mapstructure/README.md delete mode 100644 vendor/github.com/mitchellh/mapstructure/decode_hooks.go delete mode 100644 vendor/github.com/mitchellh/mapstructure/error.go delete mode 100644 vendor/github.com/mitchellh/mapstructure/mapstructure.go rename vendor/github.com/{mitchellh/mapstructure => nicksnyder/go-i18n}/LICENSE (94%) create mode 100644 vendor/github.com/nicksnyder/go-i18n/i18n/bundle/bundle.go create mode 100644 vendor/github.com/nicksnyder/go-i18n/i18n/i18n.go create mode 100644 vendor/github.com/nicksnyder/go-i18n/i18n/language/language.go create mode 100644 vendor/github.com/nicksnyder/go-i18n/i18n/language/operands.go create mode 100644 vendor/github.com/nicksnyder/go-i18n/i18n/language/plural.go create mode 100644 vendor/github.com/nicksnyder/go-i18n/i18n/language/pluralspec.go create mode 100644 vendor/github.com/nicksnyder/go-i18n/i18n/language/pluralspec_gen.go create mode 100644 vendor/github.com/nicksnyder/go-i18n/i18n/translation/plural_translation.go create mode 100644 vendor/github.com/nicksnyder/go-i18n/i18n/translation/single_translation.go create mode 100644 vendor/github.com/nicksnyder/go-i18n/i18n/translation/template.go create mode 100644 vendor/github.com/nicksnyder/go-i18n/i18n/translation/translation.go delete mode 100644 vendor/github.com/russross/blackfriday/LICENSE.txt delete mode 100644 vendor/github.com/russross/blackfriday/README.md delete mode 100644 vendor/github.com/russross/blackfriday/block.go delete mode 100644 vendor/github.com/russross/blackfriday/html.go delete mode 100644 vendor/github.com/russross/blackfriday/inline.go delete mode 100644 vendor/github.com/russross/blackfriday/latex.go delete mode 100644 vendor/github.com/russross/blackfriday/markdown.go delete mode 100644 vendor/github.com/russross/blackfriday/smartypants.go delete mode 100644 vendor/github.com/shurcooL/sanitized_anchor_name/LICENSE delete mode 100644 vendor/github.com/shurcooL/sanitized_anchor_name/README.md delete mode 100644 vendor/github.com/shurcooL/sanitized_anchor_name/main.go delete mode 100644 vendor/github.com/spf13/afero/LICENSE.txt delete mode 100644 vendor/github.com/spf13/afero/README.md delete mode 100644 vendor/github.com/spf13/afero/afero.go delete mode 100644 vendor/github.com/spf13/afero/basepath.go delete mode 100644 vendor/github.com/spf13/afero/cacheOnReadFs.go delete mode 100644 vendor/github.com/spf13/afero/const_bsds.go delete mode 100644 vendor/github.com/spf13/afero/const_win_unix.go delete mode 100644 vendor/github.com/spf13/afero/copyOnWriteFs.go delete mode 100644 vendor/github.com/spf13/afero/httpFs.go delete mode 100644 vendor/github.com/spf13/afero/ioutil.go delete mode 100644 vendor/github.com/spf13/afero/mem/dir.go delete mode 100644 vendor/github.com/spf13/afero/mem/dirmap.go delete mode 100644 
vendor/github.com/spf13/afero/mem/file.go delete mode 100644 vendor/github.com/spf13/afero/memmap.go delete mode 100644 vendor/github.com/spf13/afero/memradix.go delete mode 100644 vendor/github.com/spf13/afero/os.go delete mode 100644 vendor/github.com/spf13/afero/path.go delete mode 100644 vendor/github.com/spf13/afero/readonlyfs.go delete mode 100644 vendor/github.com/spf13/afero/regexpfs.go delete mode 100644 vendor/github.com/spf13/afero/unionFile.go delete mode 100644 vendor/github.com/spf13/afero/util.go delete mode 100644 vendor/github.com/spf13/cast/LICENSE delete mode 100644 vendor/github.com/spf13/cast/README.md delete mode 100644 vendor/github.com/spf13/cast/cast.go delete mode 100644 vendor/github.com/spf13/cast/caste.go delete mode 100644 vendor/github.com/spf13/cobra/LICENSE.txt delete mode 100644 vendor/github.com/spf13/cobra/README.md delete mode 100644 vendor/github.com/spf13/cobra/args.go delete mode 100644 vendor/github.com/spf13/cobra/bash_completions.go delete mode 100644 vendor/github.com/spf13/cobra/bash_completions.md delete mode 100644 vendor/github.com/spf13/cobra/cobra.go delete mode 100644 vendor/github.com/spf13/cobra/command.go delete mode 100644 vendor/github.com/spf13/cobra/command_notwin.go delete mode 100644 vendor/github.com/spf13/cobra/command_win.go delete mode 100644 vendor/github.com/spf13/cobra/doc/man_docs.go delete mode 100644 vendor/github.com/spf13/cobra/doc/man_docs.md delete mode 100644 vendor/github.com/spf13/cobra/doc/md_docs.go delete mode 100644 vendor/github.com/spf13/cobra/doc/md_docs.md delete mode 100644 vendor/github.com/spf13/cobra/doc/util.go delete mode 100644 vendor/github.com/spf13/cobra/doc/yaml_docs.go delete mode 100644 vendor/github.com/spf13/cobra/doc/yaml_docs.md delete mode 100644 vendor/github.com/spf13/cobra/zsh_completions.go delete mode 100644 vendor/github.com/spf13/jwalterweatherman/LICENSE delete mode 100644 vendor/github.com/spf13/jwalterweatherman/README.md delete mode 100644 vendor/github.com/spf13/jwalterweatherman/default_notepad.go delete mode 100644 vendor/github.com/spf13/jwalterweatherman/log_counter.go delete mode 100644 vendor/github.com/spf13/jwalterweatherman/notepad.go delete mode 100644 vendor/github.com/spf13/pflag/LICENSE delete mode 100644 vendor/github.com/spf13/pflag/README.md delete mode 100644 vendor/github.com/spf13/pflag/bool.go delete mode 100644 vendor/github.com/spf13/pflag/bool_slice.go delete mode 100644 vendor/github.com/spf13/pflag/count.go delete mode 100644 vendor/github.com/spf13/pflag/duration.go delete mode 100644 vendor/github.com/spf13/pflag/flag.go delete mode 100644 vendor/github.com/spf13/pflag/float32.go delete mode 100644 vendor/github.com/spf13/pflag/float64.go delete mode 100644 vendor/github.com/spf13/pflag/golangflag.go delete mode 100644 vendor/github.com/spf13/pflag/int.go delete mode 100644 vendor/github.com/spf13/pflag/int32.go delete mode 100644 vendor/github.com/spf13/pflag/int64.go delete mode 100644 vendor/github.com/spf13/pflag/int8.go delete mode 100644 vendor/github.com/spf13/pflag/int_slice.go delete mode 100644 vendor/github.com/spf13/pflag/ip.go delete mode 100644 vendor/github.com/spf13/pflag/ip_slice.go delete mode 100644 vendor/github.com/spf13/pflag/ipmask.go delete mode 100644 vendor/github.com/spf13/pflag/ipnet.go delete mode 100644 vendor/github.com/spf13/pflag/string.go delete mode 
100644 vendor/github.com/spf13/pflag/string_array.go delete mode 100644 vendor/github.com/spf13/pflag/string_slice.go delete mode 100644 vendor/github.com/spf13/pflag/uint.go delete mode 100644 vendor/github.com/spf13/pflag/uint16.go delete mode 100644 vendor/github.com/spf13/pflag/uint32.go delete mode 100644 vendor/github.com/spf13/pflag/uint64.go delete mode 100644 vendor/github.com/spf13/pflag/uint8.go delete mode 100644 vendor/github.com/spf13/pflag/uint_slice.go delete mode 100644 vendor/github.com/spf13/viper/LICENSE delete mode 100644 vendor/github.com/spf13/viper/README.md delete mode 100644 vendor/github.com/spf13/viper/flags.go delete mode 100644 vendor/github.com/spf13/viper/nohup.out delete mode 100644 vendor/github.com/spf13/viper/util.go delete mode 100644 vendor/github.com/spf13/viper/viper.go delete mode 100644 vendor/golang.org/x/sys/LICENSE delete mode 100644 vendor/golang.org/x/sys/PATENTS delete mode 100644 vendor/golang.org/x/sys/unix/README.md delete mode 100644 vendor/golang.org/x/sys/unix/asm_darwin_386.s delete mode 100644 vendor/golang.org/x/sys/unix/asm_darwin_amd64.s delete mode 100644 vendor/golang.org/x/sys/unix/asm_darwin_arm.s delete mode 100644 vendor/golang.org/x/sys/unix/asm_darwin_arm64.s delete mode 100644 vendor/golang.org/x/sys/unix/asm_dragonfly_amd64.s delete mode 100644 vendor/golang.org/x/sys/unix/asm_freebsd_386.s delete mode 100644 vendor/golang.org/x/sys/unix/asm_freebsd_amd64.s delete mode 100644 vendor/golang.org/x/sys/unix/asm_freebsd_arm.s delete mode 100644 vendor/golang.org/x/sys/unix/asm_linux_386.s delete mode 100644 vendor/golang.org/x/sys/unix/asm_linux_amd64.s delete mode 100644 vendor/golang.org/x/sys/unix/asm_linux_arm.s delete mode 100644 vendor/golang.org/x/sys/unix/asm_linux_arm64.s delete mode 100644 vendor/golang.org/x/sys/unix/asm_linux_mips64x.s delete mode 100644 vendor/golang.org/x/sys/unix/asm_linux_mipsx.s delete mode 100644 vendor/golang.org/x/sys/unix/asm_linux_ppc64x.s delete mode 100644 vendor/golang.org/x/sys/unix/asm_linux_s390x.s delete mode 100644 vendor/golang.org/x/sys/unix/asm_netbsd_386.s delete mode 100644 vendor/golang.org/x/sys/unix/asm_netbsd_amd64.s delete mode 100644 vendor/golang.org/x/sys/unix/asm_netbsd_arm.s delete mode 100644 vendor/golang.org/x/sys/unix/asm_openbsd_386.s delete mode 100644 vendor/golang.org/x/sys/unix/asm_openbsd_amd64.s delete mode 100644 vendor/golang.org/x/sys/unix/asm_openbsd_arm.s delete mode 100644 vendor/golang.org/x/sys/unix/asm_solaris_amd64.s delete mode 100644 vendor/golang.org/x/sys/unix/bluetooth_linux.go delete mode 100644 vendor/golang.org/x/sys/unix/cap_freebsd.go delete mode 100644 vendor/golang.org/x/sys/unix/constants.go delete mode 100644 vendor/golang.org/x/sys/unix/dev_darwin.go delete mode 100644 vendor/golang.org/x/sys/unix/dev_dragonfly.go delete mode 100644 vendor/golang.org/x/sys/unix/dev_freebsd.go delete mode 100644 vendor/golang.org/x/sys/unix/dev_linux.go delete mode 100644 vendor/golang.org/x/sys/unix/dev_netbsd.go delete mode 100644 vendor/golang.org/x/sys/unix/dev_openbsd.go delete mode 100644 vendor/golang.org/x/sys/unix/dirent.go delete mode 100644 vendor/golang.org/x/sys/unix/endian_big.go delete mode 100644 vendor/golang.org/x/sys/unix/endian_little.go delete mode 100644 vendor/golang.org/x/sys/unix/env_unix.go delete mode 100644 vendor/golang.org/x/sys/unix/env_unset.go delete mode 100644 vendor/golang.org/x/sys/unix/errors_freebsd_386.go delete mode 100644 
vendor/golang.org/x/sys/unix/errors_freebsd_amd64.go delete mode 100644 vendor/golang.org/x/sys/unix/errors_freebsd_arm.go delete mode 100644 vendor/golang.org/x/sys/unix/file_unix.go delete mode 100644 vendor/golang.org/x/sys/unix/flock.go delete mode 100644 vendor/golang.org/x/sys/unix/flock_linux_32bit.go delete mode 100644 vendor/golang.org/x/sys/unix/gccgo.go delete mode 100644 vendor/golang.org/x/sys/unix/gccgo_c.c delete mode 100644 vendor/golang.org/x/sys/unix/gccgo_linux_amd64.go delete mode 100755 vendor/golang.org/x/sys/unix/mkall.sh delete mode 100755 vendor/golang.org/x/sys/unix/mkerrors.sh delete mode 100644 vendor/golang.org/x/sys/unix/mkpost.go delete mode 100755 vendor/golang.org/x/sys/unix/mksyscall.pl delete mode 100755 vendor/golang.org/x/sys/unix/mksyscall_solaris.pl delete mode 100755 vendor/golang.org/x/sys/unix/mksysctl_openbsd.pl delete mode 100755 vendor/golang.org/x/sys/unix/mksysnum_darwin.pl delete mode 100755 vendor/golang.org/x/sys/unix/mksysnum_dragonfly.pl delete mode 100755 vendor/golang.org/x/sys/unix/mksysnum_freebsd.pl delete mode 100755 vendor/golang.org/x/sys/unix/mksysnum_netbsd.pl delete mode 100755 vendor/golang.org/x/sys/unix/mksysnum_openbsd.pl delete mode 100644 vendor/golang.org/x/sys/unix/openbsd_pledge.go delete mode 100644 vendor/golang.org/x/sys/unix/pagesize_unix.go delete mode 100644 vendor/golang.org/x/sys/unix/race.go delete mode 100644 vendor/golang.org/x/sys/unix/race0.go delete mode 100644 vendor/golang.org/x/sys/unix/sockcmsg_linux.go delete mode 100644 vendor/golang.org/x/sys/unix/sockcmsg_unix.go delete mode 100644 vendor/golang.org/x/sys/unix/str.go delete mode 100644 vendor/golang.org/x/sys/unix/syscall.go delete mode 100644 vendor/golang.org/x/sys/unix/syscall_bsd.go delete mode 100644 vendor/golang.org/x/sys/unix/syscall_darwin.go delete mode 100644 vendor/golang.org/x/sys/unix/syscall_darwin_386.go delete mode 100644 vendor/golang.org/x/sys/unix/syscall_darwin_amd64.go delete mode 100644 vendor/golang.org/x/sys/unix/syscall_darwin_arm.go delete mode 100644 vendor/golang.org/x/sys/unix/syscall_darwin_arm64.go delete mode 100644 vendor/golang.org/x/sys/unix/syscall_dragonfly.go delete mode 100644 vendor/golang.org/x/sys/unix/syscall_dragonfly_amd64.go delete mode 100644 vendor/golang.org/x/sys/unix/syscall_freebsd.go delete mode 100644 vendor/golang.org/x/sys/unix/syscall_freebsd_386.go delete mode 100644 vendor/golang.org/x/sys/unix/syscall_freebsd_amd64.go delete mode 100644 vendor/golang.org/x/sys/unix/syscall_freebsd_arm.go delete mode 100644 vendor/golang.org/x/sys/unix/syscall_linux.go delete mode 100644 vendor/golang.org/x/sys/unix/syscall_linux_386.go delete mode 100644 vendor/golang.org/x/sys/unix/syscall_linux_amd64.go delete mode 100644 vendor/golang.org/x/sys/unix/syscall_linux_amd64_gc.go delete mode 100644 vendor/golang.org/x/sys/unix/syscall_linux_arm.go delete mode 100644 vendor/golang.org/x/sys/unix/syscall_linux_arm64.go delete mode 100644 vendor/golang.org/x/sys/unix/syscall_linux_mips64x.go delete mode 100644 vendor/golang.org/x/sys/unix/syscall_linux_mipsx.go delete mode 100644 vendor/golang.org/x/sys/unix/syscall_linux_ppc64x.go delete mode 100644 vendor/golang.org/x/sys/unix/syscall_linux_s390x.go delete mode 100644 vendor/golang.org/x/sys/unix/syscall_linux_sparc64.go delete mode 100644 vendor/golang.org/x/sys/unix/syscall_netbsd.go delete mode 100644 vendor/golang.org/x/sys/unix/syscall_netbsd_386.go delete mode 100644 vendor/golang.org/x/sys/unix/syscall_netbsd_amd64.go delete mode 100644 
vendor/golang.org/x/sys/unix/syscall_netbsd_arm.go delete mode 100644 vendor/golang.org/x/sys/unix/syscall_no_getwd.go delete mode 100644 vendor/golang.org/x/sys/unix/syscall_openbsd.go delete mode 100644 vendor/golang.org/x/sys/unix/syscall_openbsd_386.go delete mode 100644 vendor/golang.org/x/sys/unix/syscall_openbsd_amd64.go delete mode 100644 vendor/golang.org/x/sys/unix/syscall_openbsd_arm.go delete mode 100644 vendor/golang.org/x/sys/unix/syscall_solaris.go delete mode 100644 vendor/golang.org/x/sys/unix/syscall_solaris_amd64.go delete mode 100644 vendor/golang.org/x/sys/unix/syscall_unix.go delete mode 100644 vendor/golang.org/x/sys/unix/syscall_unix_gc.go delete mode 100644 vendor/golang.org/x/sys/unix/types_darwin.go delete mode 100644 vendor/golang.org/x/sys/unix/types_dragonfly.go delete mode 100644 vendor/golang.org/x/sys/unix/types_freebsd.go delete mode 100644 vendor/golang.org/x/sys/unix/types_netbsd.go delete mode 100644 vendor/golang.org/x/sys/unix/types_openbsd.go delete mode 100644 vendor/golang.org/x/sys/unix/types_solaris.go delete mode 100644 vendor/golang.org/x/sys/unix/zerrors_darwin_386.go delete mode 100644 vendor/golang.org/x/sys/unix/zerrors_darwin_amd64.go delete mode 100644 vendor/golang.org/x/sys/unix/zerrors_darwin_arm.go delete mode 100644 vendor/golang.org/x/sys/unix/zerrors_darwin_arm64.go delete mode 100644 vendor/golang.org/x/sys/unix/zerrors_dragonfly_amd64.go delete mode 100644 vendor/golang.org/x/sys/unix/zerrors_freebsd_386.go delete mode 100644 vendor/golang.org/x/sys/unix/zerrors_freebsd_amd64.go delete mode 100644 vendor/golang.org/x/sys/unix/zerrors_freebsd_arm.go delete mode 100644 vendor/golang.org/x/sys/unix/zerrors_linux_386.go delete mode 100644 vendor/golang.org/x/sys/unix/zerrors_linux_amd64.go delete mode 100644 vendor/golang.org/x/sys/unix/zerrors_linux_arm.go delete mode 100644 vendor/golang.org/x/sys/unix/zerrors_linux_arm64.go delete mode 100644 vendor/golang.org/x/sys/unix/zerrors_linux_mips.go delete mode 100644 vendor/golang.org/x/sys/unix/zerrors_linux_mips64.go delete mode 100644 vendor/golang.org/x/sys/unix/zerrors_linux_mips64le.go delete mode 100644 vendor/golang.org/x/sys/unix/zerrors_linux_mipsle.go delete mode 100644 vendor/golang.org/x/sys/unix/zerrors_linux_ppc64.go delete mode 100644 vendor/golang.org/x/sys/unix/zerrors_linux_ppc64le.go delete mode 100644 vendor/golang.org/x/sys/unix/zerrors_linux_s390x.go delete mode 100644 vendor/golang.org/x/sys/unix/zerrors_linux_sparc64.go delete mode 100644 vendor/golang.org/x/sys/unix/zerrors_netbsd_386.go delete mode 100644 vendor/golang.org/x/sys/unix/zerrors_netbsd_amd64.go delete mode 100644 vendor/golang.org/x/sys/unix/zerrors_netbsd_arm.go delete mode 100644 vendor/golang.org/x/sys/unix/zerrors_openbsd_386.go delete mode 100644 vendor/golang.org/x/sys/unix/zerrors_openbsd_amd64.go delete mode 100644 vendor/golang.org/x/sys/unix/zerrors_openbsd_arm.go delete mode 100644 vendor/golang.org/x/sys/unix/zerrors_solaris_amd64.go delete mode 100644 vendor/golang.org/x/sys/unix/zsyscall_darwin_386.go delete mode 100644 vendor/golang.org/x/sys/unix/zsyscall_darwin_amd64.go delete mode 100644 vendor/golang.org/x/sys/unix/zsyscall_darwin_arm.go delete mode 100644 vendor/golang.org/x/sys/unix/zsyscall_darwin_arm64.go delete mode 100644 vendor/golang.org/x/sys/unix/zsyscall_dragonfly_amd64.go delete mode 100644 vendor/golang.org/x/sys/unix/zsyscall_freebsd_386.go delete mode 100644 vendor/golang.org/x/sys/unix/zsyscall_freebsd_amd64.go delete mode 100644 
vendor/golang.org/x/sys/unix/zsyscall_freebsd_arm.go delete mode 100644 vendor/golang.org/x/sys/unix/zsyscall_linux_386.go delete mode 100644 vendor/golang.org/x/sys/unix/zsyscall_linux_amd64.go delete mode 100644 vendor/golang.org/x/sys/unix/zsyscall_linux_arm.go delete mode 100644 vendor/golang.org/x/sys/unix/zsyscall_linux_arm64.go delete mode 100644 vendor/golang.org/x/sys/unix/zsyscall_linux_mips.go delete mode 100644 vendor/golang.org/x/sys/unix/zsyscall_linux_mips64.go delete mode 100644 vendor/golang.org/x/sys/unix/zsyscall_linux_mips64le.go delete mode 100644 vendor/golang.org/x/sys/unix/zsyscall_linux_mipsle.go delete mode 100644 vendor/golang.org/x/sys/unix/zsyscall_linux_ppc64.go delete mode 100644 vendor/golang.org/x/sys/unix/zsyscall_linux_ppc64le.go delete mode 100644 vendor/golang.org/x/sys/unix/zsyscall_linux_s390x.go delete mode 100644 vendor/golang.org/x/sys/unix/zsyscall_linux_sparc64.go delete mode 100644 vendor/golang.org/x/sys/unix/zsyscall_netbsd_386.go delete mode 100644 vendor/golang.org/x/sys/unix/zsyscall_netbsd_amd64.go delete mode 100644 vendor/golang.org/x/sys/unix/zsyscall_netbsd_arm.go delete mode 100644 vendor/golang.org/x/sys/unix/zsyscall_openbsd_386.go delete mode 100644 vendor/golang.org/x/sys/unix/zsyscall_openbsd_amd64.go delete mode 100644 vendor/golang.org/x/sys/unix/zsyscall_openbsd_arm.go delete mode 100644 vendor/golang.org/x/sys/unix/zsyscall_solaris_amd64.go delete mode 100644 vendor/golang.org/x/sys/unix/zsysctl_openbsd_386.go delete mode 100644 vendor/golang.org/x/sys/unix/zsysctl_openbsd_amd64.go delete mode 100644 vendor/golang.org/x/sys/unix/zsysctl_openbsd_arm.go delete mode 100644 vendor/golang.org/x/sys/unix/zsysnum_darwin_386.go delete mode 100644 vendor/golang.org/x/sys/unix/zsysnum_darwin_amd64.go delete mode 100644 vendor/golang.org/x/sys/unix/zsysnum_darwin_arm.go delete mode 100644 vendor/golang.org/x/sys/unix/zsysnum_darwin_arm64.go delete mode 100644 vendor/golang.org/x/sys/unix/zsysnum_dragonfly_amd64.go delete mode 100644 vendor/golang.org/x/sys/unix/zsysnum_freebsd_386.go delete mode 100644 vendor/golang.org/x/sys/unix/zsysnum_freebsd_amd64.go delete mode 100644 vendor/golang.org/x/sys/unix/zsysnum_freebsd_arm.go delete mode 100644 vendor/golang.org/x/sys/unix/zsysnum_linux_386.go delete mode 100644 vendor/golang.org/x/sys/unix/zsysnum_linux_amd64.go delete mode 100644 vendor/golang.org/x/sys/unix/zsysnum_linux_arm.go delete mode 100644 vendor/golang.org/x/sys/unix/zsysnum_linux_arm64.go delete mode 100644 vendor/golang.org/x/sys/unix/zsysnum_linux_mips.go delete mode 100644 vendor/golang.org/x/sys/unix/zsysnum_linux_mips64.go delete mode 100644 vendor/golang.org/x/sys/unix/zsysnum_linux_mips64le.go delete mode 100644 vendor/golang.org/x/sys/unix/zsysnum_linux_mipsle.go delete mode 100644 vendor/golang.org/x/sys/unix/zsysnum_linux_ppc64.go delete mode 100644 vendor/golang.org/x/sys/unix/zsysnum_linux_ppc64le.go delete mode 100644 vendor/golang.org/x/sys/unix/zsysnum_linux_s390x.go delete mode 100644 vendor/golang.org/x/sys/unix/zsysnum_linux_sparc64.go delete mode 100644 vendor/golang.org/x/sys/unix/zsysnum_netbsd_386.go delete mode 100644 vendor/golang.org/x/sys/unix/zsysnum_netbsd_amd64.go delete mode 100644 vendor/golang.org/x/sys/unix/zsysnum_netbsd_arm.go delete mode 100644 vendor/golang.org/x/sys/unix/zsysnum_openbsd_386.go delete mode 100644 vendor/golang.org/x/sys/unix/zsysnum_openbsd_amd64.go delete mode 100644 vendor/golang.org/x/sys/unix/zsysnum_openbsd_arm.go delete mode 100644 
vendor/golang.org/x/sys/unix/zsysnum_solaris_amd64.go delete mode 100644 vendor/golang.org/x/sys/unix/ztypes_darwin_386.go delete mode 100644 vendor/golang.org/x/sys/unix/ztypes_darwin_amd64.go delete mode 100644 vendor/golang.org/x/sys/unix/ztypes_darwin_arm.go delete mode 100644 vendor/golang.org/x/sys/unix/ztypes_darwin_arm64.go delete mode 100644 vendor/golang.org/x/sys/unix/ztypes_dragonfly_amd64.go delete mode 100644 vendor/golang.org/x/sys/unix/ztypes_freebsd_386.go delete mode 100644 vendor/golang.org/x/sys/unix/ztypes_freebsd_amd64.go delete mode 100644 vendor/golang.org/x/sys/unix/ztypes_freebsd_arm.go delete mode 100644 vendor/golang.org/x/sys/unix/ztypes_linux_386.go delete mode 100644 vendor/golang.org/x/sys/unix/ztypes_linux_amd64.go delete mode 100644 vendor/golang.org/x/sys/unix/ztypes_linux_arm.go delete mode 100644 vendor/golang.org/x/sys/unix/ztypes_linux_arm64.go delete mode 100644 vendor/golang.org/x/sys/unix/ztypes_linux_mips.go delete mode 100644 vendor/golang.org/x/sys/unix/ztypes_linux_mips64.go delete mode 100644 vendor/golang.org/x/sys/unix/ztypes_linux_mips64le.go delete mode 100644 vendor/golang.org/x/sys/unix/ztypes_linux_mipsle.go delete mode 100644 vendor/golang.org/x/sys/unix/ztypes_linux_ppc64.go delete mode 100644 vendor/golang.org/x/sys/unix/ztypes_linux_ppc64le.go delete mode 100644 vendor/golang.org/x/sys/unix/ztypes_linux_s390x.go delete mode 100644 vendor/golang.org/x/sys/unix/ztypes_linux_sparc64.go delete mode 100644 vendor/golang.org/x/sys/unix/ztypes_netbsd_386.go delete mode 100644 vendor/golang.org/x/sys/unix/ztypes_netbsd_amd64.go delete mode 100644 vendor/golang.org/x/sys/unix/ztypes_netbsd_arm.go delete mode 100644 vendor/golang.org/x/sys/unix/ztypes_openbsd_386.go delete mode 100644 vendor/golang.org/x/sys/unix/ztypes_openbsd_amd64.go delete mode 100644 vendor/golang.org/x/sys/unix/ztypes_openbsd_arm.go delete mode 100644 vendor/golang.org/x/sys/unix/ztypes_solaris_amd64.go delete mode 100644 vendor/golang.org/x/text/LICENSE delete mode 100644 vendor/golang.org/x/text/PATENTS delete mode 100644 vendor/golang.org/x/text/transform/transform.go delete mode 100644 vendor/golang.org/x/text/unicode/norm/composition.go delete mode 100644 vendor/golang.org/x/text/unicode/norm/forminfo.go delete mode 100644 vendor/golang.org/x/text/unicode/norm/input.go delete mode 100644 vendor/golang.org/x/text/unicode/norm/iter.go delete mode 100644 vendor/golang.org/x/text/unicode/norm/maketables.go delete mode 100644 vendor/golang.org/x/text/unicode/norm/normalize.go delete mode 100644 vendor/golang.org/x/text/unicode/norm/readwriter.go delete mode 100644 vendor/golang.org/x/text/unicode/norm/tables.go delete mode 100644 vendor/golang.org/x/text/unicode/norm/transform.go delete mode 100644 vendor/golang.org/x/text/unicode/norm/trie.go delete mode 100644 vendor/golang.org/x/text/unicode/norm/triegen.go diff --git a/vendor/github.com/cpuguy83/go-md2man/LICENSE.md b/vendor/github.com/alecthomas/units/COPYING similarity index 60% rename from vendor/github.com/cpuguy83/go-md2man/LICENSE.md rename to vendor/github.com/alecthomas/units/COPYING index 1cade6cef6..2993ec085d 100644 --- a/vendor/github.com/cpuguy83/go-md2man/LICENSE.md +++ b/vendor/github.com/alecthomas/units/COPYING @@ -1,13 +1,11 @@ -The MIT License (MIT) +Copyright (C) 2014 Alec Thomas -Copyright (c) 2014 Brian Goff - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the 
Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. diff --git a/vendor/github.com/alecthomas/units/README.md b/vendor/github.com/alecthomas/units/README.md new file mode 100644 index 0000000000..bee884e3c1 --- /dev/null +++ b/vendor/github.com/alecthomas/units/README.md @@ -0,0 +1,11 @@ +# Units - Helpful unit multipliers and functions for Go + +The goal of this package is to have functionality similar to the [time](http://golang.org/pkg/time/) package. + +It allows for code like this: + +```go +n, err := ParseBase2Bytes("1KB") +// n == 1024 +n = units.Mebibyte * 512 +``` diff --git a/vendor/github.com/alecthomas/units/bytes.go b/vendor/github.com/alecthomas/units/bytes.go new file mode 100644 index 0000000000..eaadeb8005 --- /dev/null +++ b/vendor/github.com/alecthomas/units/bytes.go @@ -0,0 +1,83 @@ +package units + +// Base2Bytes is the old non-SI power-of-2 byte scale (1024 bytes in a kilobyte, +// etc.). +type Base2Bytes int64 + +// Base-2 byte units. +const ( + Kibibyte Base2Bytes = 1024 + KiB = Kibibyte + Mebibyte = Kibibyte * 1024 + MiB = Mebibyte + Gibibyte = Mebibyte * 1024 + GiB = Gibibyte + Tebibyte = Gibibyte * 1024 + TiB = Tebibyte + Pebibyte = Tebibyte * 1024 + PiB = Pebibyte + Exbibyte = Pebibyte * 1024 + EiB = Exbibyte +) + +var ( + bytesUnitMap = MakeUnitMap("iB", "B", 1024) + oldBytesUnitMap = MakeUnitMap("B", "B", 1024) +) + +// ParseBase2Bytes supports both iB and B in base-2 multipliers. That is, KB +// and KiB are both 1024. +func ParseBase2Bytes(s string) (Base2Bytes, error) { + n, err := ParseUnit(s, bytesUnitMap) + if err != nil { + n, err = ParseUnit(s, oldBytesUnitMap) + } + return Base2Bytes(n), err +} + +func (b Base2Bytes) String() string { + return ToString(int64(b), 1024, "iB", "B") +} + +var ( + metricBytesUnitMap = MakeUnitMap("B", "B", 1000) +) + +// MetricBytes are SI byte units (1000 bytes in a kilobyte). +type MetricBytes SI + +// SI base-10 byte units. +const ( + Kilobyte MetricBytes = 1000 + KB = Kilobyte + Megabyte = Kilobyte * 1000 + MB = Megabyte + Gigabyte = Megabyte * 1000 + GB = Gigabyte + Terabyte = Gigabyte * 1000 + TB = Terabyte + Petabyte = Terabyte * 1000 + PB = Petabyte + Exabyte = Petabyte * 1000 + EB = Exabyte +) + +// ParseMetricBytes parses base-10 metric byte units. That is, KB is 1000 bytes. +func ParseMetricBytes(s string) (MetricBytes, error) { + n, err := ParseUnit(s, metricBytesUnitMap) + return MetricBytes(n), err +} + +func (m MetricBytes) String() string { + return ToString(int64(m), 1000, "B", "B") +} + +// ParseStrictBytes supports both iB and B suffixes for base 2 and metric, +// respectively. That is, KiB represents 1024 and KB represents 1000. 
+func ParseStrictBytes(s string) (int64, error) { + n, err := ParseUnit(s, bytesUnitMap) + if err != nil { + n, err = ParseUnit(s, metricBytesUnitMap) + } + return int64(n), err +} diff --git a/vendor/github.com/alecthomas/units/doc.go b/vendor/github.com/alecthomas/units/doc.go new file mode 100644 index 0000000000..156ae38672 --- /dev/null +++ b/vendor/github.com/alecthomas/units/doc.go @@ -0,0 +1,13 @@ +// Package units provides helpful unit multipliers and functions for Go. +// +// The goal of this package is to have functionality similar to the time [1] package. +// +// +// [1] http://golang.org/pkg/time/ +// +// It allows for code like this: +// +// n, err := ParseBase2Bytes("1KB") +// // n == 1024 +// n = units.Mebibyte * 512 +package units diff --git a/vendor/github.com/alecthomas/units/si.go b/vendor/github.com/alecthomas/units/si.go new file mode 100644 index 0000000000..8234a9d52c --- /dev/null +++ b/vendor/github.com/alecthomas/units/si.go @@ -0,0 +1,26 @@ +package units + +// SI units. +type SI int64 + +// SI unit multiples. +const ( + Kilo SI = 1000 + Mega = Kilo * 1000 + Giga = Mega * 1000 + Tera = Giga * 1000 + Peta = Tera * 1000 + Exa = Peta * 1000 +) + +func MakeUnitMap(suffix, shortSuffix string, scale int64) map[string]float64 { + return map[string]float64{ + shortSuffix: 1, + "K" + suffix: float64(scale), + "M" + suffix: float64(scale * scale), + "G" + suffix: float64(scale * scale * scale), + "T" + suffix: float64(scale * scale * scale * scale), + "P" + suffix: float64(scale * scale * scale * scale * scale), + "E" + suffix: float64(scale * scale * scale * scale * scale * scale), + } +} diff --git a/vendor/github.com/alecthomas/units/util.go b/vendor/github.com/alecthomas/units/util.go new file mode 100644 index 0000000000..6527e92d16 --- /dev/null +++ b/vendor/github.com/alecthomas/units/util.go @@ -0,0 +1,138 @@ +package units + +import ( + "errors" + "fmt" + "strings" +) + +var ( + siUnits = []string{"", "K", "M", "G", "T", "P", "E"} +) + +func ToString(n int64, scale int64, suffix, baseSuffix string) string { + mn := len(siUnits) + out := make([]string, mn) + for i, m := range siUnits { + if n%scale != 0 || i == 0 && n == 0 { + s := suffix + if i == 0 { + s = baseSuffix + } + out[mn-1-i] = fmt.Sprintf("%d%s%s", n%scale, m, s) + } + n /= scale + if n == 0 { + break + } + } + return strings.Join(out, "") +} + +// Below code ripped straight from http://golang.org/src/pkg/time/format.go?s=33392:33438#L1123 +var errLeadingInt = errors.New("units: bad [0-9]*") // never printed + +// leadingInt consumes the leading [0-9]* from s. +func leadingInt(s string) (x int64, rem string, err error) { + i := 0 + for ; i < len(s); i++ { + c := s[i] + if c < '0' || c > '9' { + break + } + if x >= (1<<63-10)/10 { + // overflow + return 0, "", errLeadingInt + } + x = x*10 + int64(c) - '0' + } + return x, s[i:], nil +} + +func ParseUnit(s string, unitMap map[string]float64) (int64, error) { + // [-+]?([0-9]*(\.[0-9]*)?[a-z]+)+ + orig := s + f := float64(0) + neg := false + + // Consume [-+]? + if s != "" { + c := s[0] + if c == '-' || c == '+' { + neg = c == '-' + s = s[1:] + } + } + // Special case: if all that is left is "0", this is zero. + if s == "0" { + return 0, nil + } + if s == "" { + return 0, errors.New("units: invalid " + orig) + } + for s != "" { + g := float64(0) // this element of the sequence + + var x int64 + var err error + + // The next character must be [0-9.] + if !(s[0] == '.' 
|| ('0' <= s[0] && s[0] <= '9')) { + return 0, errors.New("units: invalid " + orig) + } + // Consume [0-9]* + pl := len(s) + x, s, err = leadingInt(s) + if err != nil { + return 0, errors.New("units: invalid " + orig) + } + g = float64(x) + pre := pl != len(s) // whether we consumed anything before a period + + // Consume (\.[0-9]*)? + post := false + if s != "" && s[0] == '.' { + s = s[1:] + pl := len(s) + x, s, err = leadingInt(s) + if err != nil { + return 0, errors.New("units: invalid " + orig) + } + scale := 1.0 + for n := pl - len(s); n > 0; n-- { + scale *= 10 + } + g += float64(x) / scale + post = pl != len(s) + } + if !pre && !post { + // no digits (e.g. ".s" or "-.s") + return 0, errors.New("units: invalid " + orig) + } + + // Consume unit. + i := 0 + for ; i < len(s); i++ { + c := s[i] + if c == '.' || ('0' <= c && c <= '9') { + break + } + } + u := s[:i] + s = s[i:] + unit, ok := unitMap[u] + if !ok { + return 0, errors.New("units: unknown unit " + u + " in " + orig) + } + + f += g * unit + } + + if neg { + f = -f + } + if f < float64(-1<<63) || f > float64(1<<63-1) { + return 0, errors.New("units: overflow parsing unit") + } + return int64(f), nil +} diff --git a/vendor/github.com/cpuguy83/go-md2man/md2man/md2man.go b/vendor/github.com/cpuguy83/go-md2man/md2man/md2man.go deleted file mode 100644 index 8f44fa1550..0000000000 --- a/vendor/github.com/cpuguy83/go-md2man/md2man/md2man.go +++ /dev/null @@ -1,19 +0,0 @@ -package md2man - -import ( - "github.com/russross/blackfriday" -) - -func Render(doc []byte) []byte { - renderer := RoffRenderer(0) - extensions := 0 - extensions |= blackfriday.EXTENSION_NO_INTRA_EMPHASIS - extensions |= blackfriday.EXTENSION_TABLES - extensions |= blackfriday.EXTENSION_FENCED_CODE - extensions |= blackfriday.EXTENSION_AUTOLINK - extensions |= blackfriday.EXTENSION_SPACE_HEADERS - extensions |= blackfriday.EXTENSION_FOOTNOTES - extensions |= blackfriday.EXTENSION_TITLEBLOCK - - return blackfriday.Markdown(doc, renderer, extensions) -} diff --git a/vendor/github.com/cpuguy83/go-md2man/md2man/roff.go b/vendor/github.com/cpuguy83/go-md2man/md2man/roff.go deleted file mode 100644 index b8cea1c73e..0000000000 --- a/vendor/github.com/cpuguy83/go-md2man/md2man/roff.go +++ /dev/null @@ -1,282 +0,0 @@ -package md2man - -import ( - "bytes" - "fmt" - "html" - "strings" - - "github.com/russross/blackfriday" -) - -type roffRenderer struct{} - -var listCounter int - -func RoffRenderer(flags int) blackfriday.Renderer { - return &roffRenderer{} -} - -func (r *roffRenderer) GetFlags() int { - return 0 -} - -func (r *roffRenderer) TitleBlock(out *bytes.Buffer, text []byte) { - out.WriteString(".TH ") - - splitText := bytes.Split(text, []byte("\n")) - for i, line := range splitText { - line = bytes.TrimPrefix(line, []byte("% ")) - if i == 0 { - line = bytes.Replace(line, []byte("("), []byte("\" \""), 1) - line = bytes.Replace(line, []byte(")"), []byte("\" \""), 1) - } - line = append([]byte("\""), line...) - line = append(line, []byte("\" ")...) 
- out.Write(line) - } - out.WriteString("\n") - - // disable hyphenation - out.WriteString(".nh\n") - // disable justification (adjust text to left margin only) - out.WriteString(".ad l\n") -} - -func (r *roffRenderer) BlockCode(out *bytes.Buffer, text []byte, lang string) { - out.WriteString("\n.PP\n.RS\n\n.nf\n") - escapeSpecialChars(out, text) - out.WriteString("\n.fi\n.RE\n") -} - -func (r *roffRenderer) BlockQuote(out *bytes.Buffer, text []byte) { - out.WriteString("\n.PP\n.RS\n") - out.Write(text) - out.WriteString("\n.RE\n") -} - -func (r *roffRenderer) BlockHtml(out *bytes.Buffer, text []byte) { - out.Write(text) -} - -func (r *roffRenderer) Header(out *bytes.Buffer, text func() bool, level int, id string) { - marker := out.Len() - - switch { - case marker == 0: - // This is the doc header - out.WriteString(".TH ") - case level == 1: - out.WriteString("\n\n.SH ") - case level == 2: - out.WriteString("\n.SH ") - default: - out.WriteString("\n.SS ") - } - - if !text() { - out.Truncate(marker) - return - } -} - -func (r *roffRenderer) HRule(out *bytes.Buffer) { - out.WriteString("\n.ti 0\n\\l'\\n(.lu'\n") -} - -func (r *roffRenderer) List(out *bytes.Buffer, text func() bool, flags int) { - marker := out.Len() - if flags&blackfriday.LIST_TYPE_ORDERED != 0 { - listCounter = 1 - } - if !text() { - out.Truncate(marker) - return - } -} - -func (r *roffRenderer) ListItem(out *bytes.Buffer, text []byte, flags int) { - if flags&blackfriday.LIST_TYPE_ORDERED != 0 { - out.WriteString(fmt.Sprintf(".IP \"%3d.\" 5\n", listCounter)) - listCounter += 1 - } else { - out.WriteString(".IP \\(bu 2\n") - } - out.Write(text) - out.WriteString("\n") -} - -func (r *roffRenderer) Paragraph(out *bytes.Buffer, text func() bool) { - marker := out.Len() - out.WriteString("\n.PP\n") - if !text() { - out.Truncate(marker) - return - } - if marker != 0 { - out.WriteString("\n") - } -} - -// TODO: This might now work -func (r *roffRenderer) Table(out *bytes.Buffer, header []byte, body []byte, columnData []int) { - out.WriteString(".TS\nallbox;\n") - - out.Write(header) - out.Write(body) - out.WriteString("\n.TE\n") -} - -func (r *roffRenderer) TableRow(out *bytes.Buffer, text []byte) { - if out.Len() > 0 { - out.WriteString("\n") - } - out.Write(text) - out.WriteString("\n") -} - -func (r *roffRenderer) TableHeaderCell(out *bytes.Buffer, text []byte, align int) { - if out.Len() > 0 { - out.WriteString(" ") - } - out.Write(text) - out.WriteString(" ") -} - -// TODO: This is probably broken -func (r *roffRenderer) TableCell(out *bytes.Buffer, text []byte, align int) { - if out.Len() > 0 { - out.WriteString("\t") - } - out.Write(text) - out.WriteString("\t") -} - -func (r *roffRenderer) Footnotes(out *bytes.Buffer, text func() bool) { - -} - -func (r *roffRenderer) FootnoteItem(out *bytes.Buffer, name, text []byte, flags int) { - -} - -func (r *roffRenderer) AutoLink(out *bytes.Buffer, link []byte, kind int) { - out.WriteString("\n\\[la]") - out.Write(link) - out.WriteString("\\[ra]") -} - -func (r *roffRenderer) CodeSpan(out *bytes.Buffer, text []byte) { - out.WriteString("\\fB\\fC") - escapeSpecialChars(out, text) - out.WriteString("\\fR") -} - -func (r *roffRenderer) DoubleEmphasis(out *bytes.Buffer, text []byte) { - out.WriteString("\\fB") - out.Write(text) - out.WriteString("\\fP") -} - -func (r *roffRenderer) Emphasis(out *bytes.Buffer, text []byte) { - out.WriteString("\\fI") - out.Write(text) - out.WriteString("\\fP") -} - -func (r *roffRenderer) Image(out *bytes.Buffer, link []byte, title []byte, alt []byte) { 
-} - -func (r *roffRenderer) LineBreak(out *bytes.Buffer) { - out.WriteString("\n.br\n") -} - -func (r *roffRenderer) Link(out *bytes.Buffer, link []byte, title []byte, content []byte) { - out.Write(content) - r.AutoLink(out, link, 0) -} - -func (r *roffRenderer) RawHtmlTag(out *bytes.Buffer, tag []byte) { - out.Write(tag) -} - -func (r *roffRenderer) TripleEmphasis(out *bytes.Buffer, text []byte) { - out.WriteString("\\s+2") - out.Write(text) - out.WriteString("\\s-2") -} - -func (r *roffRenderer) StrikeThrough(out *bytes.Buffer, text []byte) { -} - -func (r *roffRenderer) FootnoteRef(out *bytes.Buffer, ref []byte, id int) { - -} - -func (r *roffRenderer) Entity(out *bytes.Buffer, entity []byte) { - out.WriteString(html.UnescapeString(string(entity))) -} - -func processFooterText(text []byte) []byte { - text = bytes.TrimPrefix(text, []byte("% ")) - newText := []byte{} - textArr := strings.Split(string(text), ") ") - - for i, w := range textArr { - if i == 0 { - w = strings.Replace(w, "(", "\" \"", 1) - w = fmt.Sprintf("\"%s\"", w) - } else { - w = fmt.Sprintf(" \"%s\"", w) - } - newText = append(newText, []byte(w)...) - } - newText = append(newText, []byte(" \"\"")...) - - return newText -} - -func (r *roffRenderer) NormalText(out *bytes.Buffer, text []byte) { - escapeSpecialChars(out, text) -} - -func (r *roffRenderer) DocumentHeader(out *bytes.Buffer) { -} - -func (r *roffRenderer) DocumentFooter(out *bytes.Buffer) { -} - -func needsBackslash(c byte) bool { - for _, r := range []byte("-_&\\~") { - if c == r { - return true - } - } - return false -} - -func escapeSpecialChars(out *bytes.Buffer, text []byte) { - for i := 0; i < len(text); i++ { - // escape initial apostrophe or period - if len(text) >= 1 && (text[0] == '\'' || text[0] == '.') { - out.WriteString("\\&") - } - - // directly copy normal characters - org := i - - for i < len(text) && !needsBackslash(text[i]) { - i++ - } - if i > org { - out.Write(text[org:i]) - } - - // escape a character - if i >= len(text) { - break - } - out.WriteByte('\\') - out.WriteByte(text[i]) - } -} diff --git a/vendor/github.com/fsnotify/fsnotify/AUTHORS b/vendor/github.com/fsnotify/fsnotify/AUTHORS deleted file mode 100644 index 0a5bf8f617..0000000000 --- a/vendor/github.com/fsnotify/fsnotify/AUTHORS +++ /dev/null @@ -1,46 +0,0 @@ -# Names should be added to this file as -# Name or Organization -# The email address is not required for organizations. - -# You can update this list using the following command: -# -# $ git shortlog -se | awk '{print $2 " " $3 " " $4}' - -# Please keep the list sorted. 
- -Adrien Bustany -Amit Krishnan -Bjørn Erik Pedersen -Bruno Bigras -Caleb Spare -Case Nelson -Chris Howey -Christoffer Buchholz -Daniel Wagner-Hall -Dave Cheney -Evan Phoenix -Francisco Souza -Hari haran -John C Barstow -Kelvin Fo -Ken-ichirou MATSUZAWA -Matt Layher -Nathan Youngman -Patrick -Paul Hammond -Pawel Knap -Pieter Droogendijk -Pursuit92 -Riku Voipio -Rob Figueiredo -Slawek Ligus -Soge Zhang -Tiffany Jernigan -Tilak Sharma -Travis Cline -Tudor Golubenco -Yukang -bronze1man -debrando -henrikedwards -铁哥 diff --git a/vendor/github.com/fsnotify/fsnotify/CHANGELOG.md b/vendor/github.com/fsnotify/fsnotify/CHANGELOG.md deleted file mode 100644 index 8c732c1d85..0000000000 --- a/vendor/github.com/fsnotify/fsnotify/CHANGELOG.md +++ /dev/null @@ -1,307 +0,0 @@ -# Changelog - -## v1.4.2 / 2016-10-10 - -* Linux: use InotifyInit1 with IN_CLOEXEC to stop leaking a file descriptor to a child process when using fork/exec [#178](https://github.com/fsnotify/fsnotify/pull/178) (thanks @pattyshack) - -## v1.4.1 / 2016-10-04 - -* Fix flaky inotify stress test on Linux [#177](https://github.com/fsnotify/fsnotify/pull/177) (thanks @pattyshack) - -## v1.4.0 / 2016-10-01 - -* add a String() method to Event.Op [#165](https://github.com/fsnotify/fsnotify/pull/165) (thanks @oozie) - -## v1.3.1 / 2016-06-28 - -* Windows: fix for double backslash when watching the root of a drive [#151](https://github.com/fsnotify/fsnotify/issues/151) (thanks @brunoqc) - -## v1.3.0 / 2016-04-19 - -* Support linux/arm64 by [patching](https://go-review.googlesource.com/#/c/21971/) x/sys/unix and switching to to it from syscall (thanks @suihkulokki) [#135](https://github.com/fsnotify/fsnotify/pull/135) - -## v1.2.10 / 2016-03-02 - -* Fix golint errors in windows.go [#121](https://github.com/fsnotify/fsnotify/pull/121) (thanks @tiffanyfj) - -## v1.2.9 / 2016-01-13 - -kqueue: Fix logic for CREATE after REMOVE [#111](https://github.com/fsnotify/fsnotify/pull/111) (thanks @bep) - -## v1.2.8 / 2015-12-17 - -* kqueue: fix race condition in Close [#105](https://github.com/fsnotify/fsnotify/pull/105) (thanks @djui for reporting the issue and @ppknap for writing a failing test) -* inotify: fix race in test -* enable race detection for continuous integration (Linux, Mac, Windows) - -## v1.2.5 / 2015-10-17 - -* inotify: use epoll_create1 for arm64 support (requires Linux 2.6.27 or later) [#100](https://github.com/fsnotify/fsnotify/pull/100) (thanks @suihkulokki) -* inotify: fix path leaks [#73](https://github.com/fsnotify/fsnotify/pull/73) (thanks @chamaken) -* kqueue: watch for rename events on subdirectories [#83](https://github.com/fsnotify/fsnotify/pull/83) (thanks @guotie) -* kqueue: avoid infinite loops from symlinks cycles [#101](https://github.com/fsnotify/fsnotify/pull/101) (thanks @illicitonion) - -## v1.2.1 / 2015-10-14 - -* kqueue: don't watch named pipes [#98](https://github.com/fsnotify/fsnotify/pull/98) (thanks @evanphx) - -## v1.2.0 / 2015-02-08 - -* inotify: use epoll to wake up readEvents [#66](https://github.com/fsnotify/fsnotify/pull/66) (thanks @PieterD) -* inotify: closing watcher should now always shut down goroutine [#63](https://github.com/fsnotify/fsnotify/pull/63) (thanks @PieterD) -* kqueue: close kqueue after removing watches, fixes [#59](https://github.com/fsnotify/fsnotify/issues/59) - -## v1.1.1 / 2015-02-05 - -* inotify: Retry read on EINTR [#61](https://github.com/fsnotify/fsnotify/issues/61) (thanks @PieterD) - -## v1.1.0 / 2014-12-12 - -* 
kqueue: rework internals [#43](https://github.com/fsnotify/fsnotify/pull/43) - * add low-level functions - * only need to store flags on directories - * less mutexes [#13](https://github.com/fsnotify/fsnotify/issues/13) - * done can be an unbuffered channel - * remove calls to os.NewSyscallError -* More efficient string concatenation for Event.String() [#52](https://github.com/fsnotify/fsnotify/pull/52) (thanks @mdlayher) -* kqueue: fix regression in rework causing subdirectories to be watched [#48](https://github.com/fsnotify/fsnotify/issues/48) -* kqueue: cleanup internal watch before sending remove event [#51](https://github.com/fsnotify/fsnotify/issues/51) - -## v1.0.4 / 2014-09-07 - -* kqueue: add dragonfly to the build tags. -* Rename source code files, rearrange code so exported APIs are at the top. -* Add done channel to example code. [#37](https://github.com/fsnotify/fsnotify/pull/37) (thanks @chenyukang) - -## v1.0.3 / 2014-08-19 - -* [Fix] Windows MOVED_TO now translates to Create like on BSD and Linux. [#36](https://github.com/fsnotify/fsnotify/issues/36) - -## v1.0.2 / 2014-08-17 - -* [Fix] Missing create events on macOS. [#14](https://github.com/fsnotify/fsnotify/issues/14) (thanks @zhsso) -* [Fix] Make ./path and path equivalent. (thanks @zhsso) - -## v1.0.0 / 2014-08-15 - -* [API] Remove AddWatch on Windows, use Add. -* Improve documentation for exported identifiers. [#30](https://github.com/fsnotify/fsnotify/issues/30) -* Minor updates based on feedback from golint. - -## dev / 2014-07-09 - -* Moved to [github.com/fsnotify/fsnotify](https://github.com/fsnotify/fsnotify). -* Use os.NewSyscallError instead of returning errno (thanks @hariharan-uno) - -## dev / 2014-07-04 - -* kqueue: fix incorrect mutex used in Close() -* Update example to demonstrate usage of Op. - -## dev / 2014-06-28 - -* [API] Don't set the Write Op for attribute notifications [#4](https://github.com/fsnotify/fsnotify/issues/4) -* Fix for String() method on Event (thanks Alex Brainman) -* Don't build on Plan 9 or Solaris (thanks @4ad) - -## dev / 2014-06-21 - -* Events channel of type Event rather than *Event. -* [internal] use syscall constants directly for inotify and kqueue. -* [internal] kqueue: rename events to kevents and fileEvent to event. - -## dev / 2014-06-19 - -* Go 1.3+ required on Windows (uses syscall.ERROR_MORE_DATA internally). -* [internal] remove cookie from Event struct (unused). -* [internal] Event struct has the same definition across every OS. -* [internal] remove internal watch and removeWatch methods. - -## dev / 2014-06-12 - -* [API] Renamed Watch() to Add() and RemoveWatch() to Remove(). -* [API] Pluralized channel names: Events and Errors. -* [API] Renamed FileEvent struct to Event. -* [API] Op constants replace methods like IsCreate(). - -## dev / 2014-06-12 - -* Fix data race on kevent buffer (thanks @tilaks) [#98](https://github.com/howeyc/fsnotify/pull/98) - -## dev / 2014-05-23 - -* [API] Remove current implementation of WatchFlags. 
- * current implementation doesn't take advantage of OS for efficiency - * provides little benefit over filtering events as they are received, but has extra bookkeeping and mutexes - * no tests for the current implementation - * not fully implemented on Windows [#93](https://github.com/howeyc/fsnotify/issues/93#issuecomment-39285195) - -## v0.9.3 / 2014-12-31 - -* kqueue: cleanup internal watch before sending remove event [#51](https://github.com/fsnotify/fsnotify/issues/51) - -## v0.9.2 / 2014-08-17 - -* [Backport] Fix missing create events on macOS. [#14](https://github.com/fsnotify/fsnotify/issues/14) (thanks @zhsso) - -## v0.9.1 / 2014-06-12 - -* Fix data race on kevent buffer (thanks @tilaks) [#98](https://github.com/howeyc/fsnotify/pull/98) - -## v0.9.0 / 2014-01-17 - -* IsAttrib() for events that only concern a file's metadata [#79][] (thanks @abustany) -* [Fix] kqueue: fix deadlock [#77][] (thanks @cespare) -* [NOTICE] Development has moved to `code.google.com/p/go.exp/fsnotify` in preparation for inclusion in the Go standard library. - -## v0.8.12 / 2013-11-13 - -* [API] Remove FD_SET and friends from Linux adapter - -## v0.8.11 / 2013-11-02 - -* [Doc] Add Changelog [#72][] (thanks @nathany) -* [Doc] Spotlight and double modify events on macOS [#62][] (reported by @paulhammond) - -## v0.8.10 / 2013-10-19 - -* [Fix] kqueue: remove file watches when parent directory is removed [#71][] (reported by @mdwhatcott) -* [Fix] kqueue: race between Close and readEvents [#70][] (reported by @bernerdschaefer) -* [Doc] specify OS-specific limits in README (thanks @debrando) - -## v0.8.9 / 2013-09-08 - -* [Doc] Contributing (thanks @nathany) -* [Doc] update package path in example code [#63][] (thanks @paulhammond) -* [Doc] GoCI badge in README (Linux only) [#60][] -* [Doc] Cross-platform testing with Vagrant [#59][] (thanks @nathany) - -## v0.8.8 / 2013-06-17 - -* [Fix] Windows: handle `ERROR_MORE_DATA` on Windows [#49][] (thanks @jbowtie) - -## v0.8.7 / 2013-06-03 - -* [API] Make syscall flags internal -* [Fix] inotify: ignore event changes -* [Fix] race in symlink test [#45][] (reported by @srid) -* [Fix] tests on Windows -* lower case error messages - -## v0.8.6 / 2013-05-23 - -* kqueue: Use EVT_ONLY flag on Darwin -* [Doc] Update README with full example - -## v0.8.5 / 2013-05-09 - -* [Fix] inotify: allow monitoring of "broken" symlinks (thanks @tsg) - -## v0.8.4 / 2013-04-07 - -* [Fix] kqueue: watch all file events [#40][] (thanks @ChrisBuchholz) - -## v0.8.3 / 2013-03-13 - -* [Fix] inoitfy/kqueue memory leak [#36][] (reported by @nbkolchin) -* [Fix] kqueue: use fsnFlags for watching a directory [#33][] (reported by @nbkolchin) - -## v0.8.2 / 2013-02-07 - -* [Doc] add Authors -* [Fix] fix data races for map access [#29][] (thanks @fsouza) - -## v0.8.1 / 2013-01-09 - -* [Fix] Windows path separators -* [Doc] BSD License - -## v0.8.0 / 2012-11-09 - -* kqueue: directory watching improvements (thanks @vmirage) -* inotify: add `IN_MOVED_TO` [#25][] (requested by @cpisto) -* [Fix] kqueue: deleting watched directory [#24][] (reported by @jakerr) - -## v0.7.4 / 2012-10-09 - -* [Fix] inotify: fixes from https://codereview.appspot.com/5418045/ (ugorji) -* [Fix] kqueue: preserve watch flags when watching for delete [#21][] (reported by @robfig) -* [Fix] kqueue: watch the directory even if it isn't a new watch (thanks @robfig) -* [Fix] kqueue: modify after recreation of file - -## v0.7.3 / 2012-09-27 - -* [Fix] kqueue: watch with an existing folder inside the watched folder (thanks 
@vmirage) -* [Fix] kqueue: no longer get duplicate CREATE events - -## v0.7.2 / 2012-09-01 - -* kqueue: events for created directories - -## v0.7.1 / 2012-07-14 - -* [Fix] for renaming files - -## v0.7.0 / 2012-07-02 - -* [Feature] FSNotify flags -* [Fix] inotify: Added file name back to event path - -## v0.6.0 / 2012-06-06 - -* kqueue: watch files after directory created (thanks @tmc) - -## v0.5.1 / 2012-05-22 - -* [Fix] inotify: remove all watches before Close() - -## v0.5.0 / 2012-05-03 - -* [API] kqueue: return errors during watch instead of sending over channel -* kqueue: match symlink behavior on Linux -* inotify: add `DELETE_SELF` (requested by @taralx) -* [Fix] kqueue: handle EINTR (reported by @robfig) -* [Doc] Godoc example [#1][] (thanks @davecheney) - -## v0.4.0 / 2012-03-30 - -* Go 1 released: build with go tool -* [Feature] Windows support using winfsnotify -* Windows does not have attribute change notifications -* Roll attribute notifications into IsModify - -## v0.3.0 / 2012-02-19 - -* kqueue: add files when watch directory - -## v0.2.0 / 2011-12-30 - -* update to latest Go weekly code - -## v0.1.0 / 2011-10-19 - -* kqueue: add watch on file creation to match inotify -* kqueue: create file event -* inotify: ignore `IN_IGNORED` events -* event String() -* linux: common FileEvent functions -* initial commit - -[#79]: https://github.com/howeyc/fsnotify/pull/79 -[#77]: https://github.com/howeyc/fsnotify/pull/77 -[#72]: https://github.com/howeyc/fsnotify/issues/72 -[#71]: https://github.com/howeyc/fsnotify/issues/71 -[#70]: https://github.com/howeyc/fsnotify/issues/70 -[#63]: https://github.com/howeyc/fsnotify/issues/63 -[#62]: https://github.com/howeyc/fsnotify/issues/62 -[#60]: https://github.com/howeyc/fsnotify/issues/60 -[#59]: https://github.com/howeyc/fsnotify/issues/59 -[#49]: https://github.com/howeyc/fsnotify/issues/49 -[#45]: https://github.com/howeyc/fsnotify/issues/45 -[#40]: https://github.com/howeyc/fsnotify/issues/40 -[#36]: https://github.com/howeyc/fsnotify/issues/36 -[#33]: https://github.com/howeyc/fsnotify/issues/33 -[#29]: https://github.com/howeyc/fsnotify/issues/29 -[#25]: https://github.com/howeyc/fsnotify/issues/25 -[#24]: https://github.com/howeyc/fsnotify/issues/24 -[#21]: https://github.com/howeyc/fsnotify/issues/21 diff --git a/vendor/github.com/fsnotify/fsnotify/CONTRIBUTING.md b/vendor/github.com/fsnotify/fsnotify/CONTRIBUTING.md deleted file mode 100644 index 828a60b24b..0000000000 --- a/vendor/github.com/fsnotify/fsnotify/CONTRIBUTING.md +++ /dev/null @@ -1,77 +0,0 @@ -# Contributing - -## Issues - -* Request features and report bugs using the [GitHub Issue Tracker](https://github.com/fsnotify/fsnotify/issues). -* Please indicate the platform you are using fsnotify on. -* A code example to reproduce the problem is appreciated. - -## Pull Requests - -### Contributor License Agreement - -fsnotify is derived from code in the [golang.org/x/exp](https://godoc.org/golang.org/x/exp) package and it may be included [in the standard library](https://github.com/fsnotify/fsnotify/issues/1) in the future. Therefore fsnotify carries the same [LICENSE](https://github.com/fsnotify/fsnotify/blob/master/LICENSE) as Go. Contributors retain their copyright, so you need to fill out a short form before we can accept your contribution: [Google Individual Contributor License Agreement](https://developers.google.com/open-source/cla/individual). 
- -Please indicate that you have signed the CLA in your pull request. - -### How fsnotify is Developed - -* Development is done on feature branches. -* Tests are run on BSD, Linux, macOS and Windows. -* Pull requests are reviewed and [applied to master][am] using [hub][]. - * Maintainers may modify or squash commits rather than asking contributors to. -* To issue a new release, the maintainers will: - * Update the CHANGELOG - * Tag a version, which will become available through gopkg.in. - -### How to Fork - -For smooth sailing, always use the original import path. Installing with `go get` makes this easy. - -1. Install from GitHub (`go get -u github.com/fsnotify/fsnotify`) -2. Create your feature branch (`git checkout -b my-new-feature`) -3. Ensure everything works and the tests pass (see below) -4. Commit your changes (`git commit -am 'Add some feature'`) - -Contribute upstream: - -1. Fork fsnotify on GitHub -2. Add your remote (`git remote add fork git@github.com:mycompany/repo.git`) -3. Push to the branch (`git push fork my-new-feature`) -4. Create a new Pull Request on GitHub - -This workflow is [thoroughly explained by Katrina Owen](https://splice.com/blog/contributing-open-source-git-repositories-go/). - -### Testing - -fsnotify uses build tags to compile different code on Linux, BSD, macOS, and Windows. - -Before doing a pull request, please do your best to test your changes on multiple platforms, and list which platforms you were able/unable to test on. - -To aid in cross-platform testing there is a Vagrantfile for Linux and BSD. - -* Install [Vagrant](http://www.vagrantup.com/) and [VirtualBox](https://www.virtualbox.org/) -* Setup [Vagrant Gopher](https://github.com/nathany/vagrant-gopher) in your `src` folder. -* Run `vagrant up` from the project folder. You can also setup just one box with `vagrant up linux` or `vagrant up bsd` (note: the BSD box doesn't support Windows hosts at this time, and NFS may prompt for your host OS password) -* Once setup, you can run the test suite on a given OS with a single command `vagrant ssh linux -c 'cd fsnotify/fsnotify; go test'`. -* When you're done, you will want to halt or destroy the Vagrant boxes. - -Notice: fsnotify file system events won't trigger in shared folders. The tests get around this limitation by using the /tmp directory. - -Right now there is no equivalent solution for Windows and macOS, but there are Windows VMs [freely available from Microsoft](http://www.modern.ie/en-us/virtualization-tools#downloads). - -### Maintainers - -Help maintaining fsnotify is welcome. To be a maintainer: - -* Submit a pull request and sign the CLA as above. -* You must be able to run the test suite on Mac, Windows, Linux and BSD. - -To keep master clean, the fsnotify project uses the "apply mail" workflow outlined in Nathaniel Talbott's post ["Merge pull request" Considered Harmful][am]. This requires installing [hub][]. - -All code changes should be internal pull requests. - -Releases are tagged using [Semantic Versioning](http://semver.org/). - -[hub]: https://github.com/github/hub -[am]: http://blog.spreedly.com/2014/06/24/merge-pull-request-considered-harmful/#.VGa5yZPF_Zs diff --git a/vendor/github.com/fsnotify/fsnotify/LICENSE b/vendor/github.com/fsnotify/fsnotify/LICENSE deleted file mode 100644 index f21e540800..0000000000 --- a/vendor/github.com/fsnotify/fsnotify/LICENSE +++ /dev/null @@ -1,28 +0,0 @@ -Copyright (c) 2012 The Go Authors. All rights reserved. -Copyright (c) 2012 fsnotify Authors. All rights reserved. 
- -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of Google Inc. nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/fsnotify/fsnotify/README.md b/vendor/github.com/fsnotify/fsnotify/README.md deleted file mode 100644 index 3993207413..0000000000 --- a/vendor/github.com/fsnotify/fsnotify/README.md +++ /dev/null @@ -1,79 +0,0 @@ -# File system notifications for Go - -[![GoDoc](https://godoc.org/github.com/fsnotify/fsnotify?status.svg)](https://godoc.org/github.com/fsnotify/fsnotify) [![Go Report Card](https://goreportcard.com/badge/github.com/fsnotify/fsnotify)](https://goreportcard.com/report/github.com/fsnotify/fsnotify) - -fsnotify utilizes [golang.org/x/sys](https://godoc.org/golang.org/x/sys) rather than `syscall` from the standard library. Ensure you have the latest version installed by running: - -```console -go get -u golang.org/x/sys/... -``` - -Cross platform: Windows, Linux, BSD and macOS. - -|Adapter |OS |Status | -|----------|----------|----------| -|inotify |Linux 2.6.27 or later, Android\*|Supported [![Build Status](https://travis-ci.org/fsnotify/fsnotify.svg?branch=master)](https://travis-ci.org/fsnotify/fsnotify)| -|kqueue |BSD, macOS, iOS\*|Supported [![Build Status](https://travis-ci.org/fsnotify/fsnotify.svg?branch=master)](https://travis-ci.org/fsnotify/fsnotify)| -|ReadDirectoryChangesW|Windows|Supported [![Build status](https://ci.appveyor.com/api/projects/status/ivwjubaih4r0udeh/branch/master?svg=true)](https://ci.appveyor.com/project/NathanYoungman/fsnotify/branch/master)| -|FSEvents |macOS |[Planned](https://github.com/fsnotify/fsnotify/issues/11)| -|FEN |Solaris 11 |[In Progress](https://github.com/fsnotify/fsnotify/issues/12)| -|fanotify |Linux 2.6.37+ | | -|USN Journals |Windows |[Maybe](https://github.com/fsnotify/fsnotify/issues/53)| -|Polling |*All* |[Maybe](https://github.com/fsnotify/fsnotify/issues/9)| - -\* Android and iOS are untested. - -Please see [the documentation](https://godoc.org/github.com/fsnotify/fsnotify) and consult the [FAQ](#faq) for usage information. 
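For context on what this removal drops, here is a minimal, illustrative sketch of the Events/Errors usage pattern the deleted README describes. It uses only the exported identifiers defined in the fsnotify.go and inotify.go files removed further below (NewWatcher, Add, Close, the Events and Errors channels, and the Write Op bit) plus the vendored import path from this patch; the watched path "/tmp" and the main/log scaffolding are illustrative assumptions, not anything taken from the patch itself.

```go
package main

import (
	"log"

	"github.com/fsnotify/fsnotify" // import path as vendored in this patch
)

func main() {
	// NewWatcher, Add, Close and the Events/Errors channels are the
	// exported API defined in the fsnotify.go removed below.
	watcher, err := fsnotify.NewWatcher()
	if err != nil {
		log.Fatal(err)
	}
	defer watcher.Close()

	// Per the FAQ, Events and Errors are read in a separate goroutine.
	done := make(chan struct{})
	go func() {
		defer close(done)
		for {
			select {
			case event, ok := <-watcher.Events:
				if !ok {
					return
				}
				if event.Op&fsnotify.Write == fsnotify.Write {
					log.Println("modified:", event.Name)
				}
			case err, ok := <-watcher.Errors:
				if !ok {
					return
				}
				log.Println("error:", err)
			}
		}
	}()

	// Watches are non-recursive; each directory is added explicitly.
	if err := watcher.Add("/tmp"); err != nil { // placeholder path
		log.Fatal(err)
	}
	<-done
}
```

The single reader goroutine and the done channel mirror the pattern referenced in the changelog and FAQ above; this is only a sketch of the removed library's documented usage, not part of the patch content.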
- -## API stability - -fsnotify is a fork of [howeyc/fsnotify](https://godoc.org/github.com/howeyc/fsnotify) with a new API as of v1.0. The API is based on [this design document](http://goo.gl/MrYxyA). - -All [releases](https://github.com/fsnotify/fsnotify/releases) are tagged based on [Semantic Versioning](http://semver.org/). Further API changes are [planned](https://github.com/fsnotify/fsnotify/milestones), and will be tagged with a new major revision number. - -Go 1.6 supports dependencies located in the `vendor/` folder. Unless you are creating a library, it is recommended that you copy fsnotify into `vendor/github.com/fsnotify/fsnotify` within your project, and likewise for `golang.org/x/sys`. - -## Contributing - -Please refer to [CONTRIBUTING][] before opening an issue or pull request. - -## Example - -See [example_test.go](https://github.com/fsnotify/fsnotify/blob/master/example_test.go). - -## FAQ - -**When a file is moved to another directory is it still being watched?** - -No (it shouldn't be, unless you are watching where it was moved to). - -**When I watch a directory, are all subdirectories watched as well?** - -No, you must add watches for any directory you want to watch (a recursive watcher is on the roadmap [#18][]). - -**Do I have to watch the Error and Event channels in a separate goroutine?** - -As of now, yes. Looking into making this single-thread friendly (see [howeyc #7][#7]) - -**Why am I receiving multiple events for the same file on OS X?** - -Spotlight indexing on OS X can result in multiple events (see [howeyc #62][#62]). A temporary workaround is to add your folder(s) to the *Spotlight Privacy settings* until we have a native FSEvents implementation (see [#11][]). - -**How many files can be watched at once?** - -There are OS-specific limits as to how many watches can be created: -* Linux: /proc/sys/fs/inotify/max_user_watches contains the limit, reaching this limit results in a "no space left on device" error. -* BSD / OSX: sysctl variables "kern.maxfiles" and "kern.maxfilesperproc", reaching these limits results in a "too many open files" error. - -[#62]: https://github.com/howeyc/fsnotify/issues/62 -[#18]: https://github.com/fsnotify/fsnotify/issues/18 -[#11]: https://github.com/fsnotify/fsnotify/issues/11 -[#7]: https://github.com/howeyc/fsnotify/issues/7 - -[contributing]: https://github.com/fsnotify/fsnotify/blob/master/CONTRIBUTING.md - -## Related Projects - -* [notify](https://github.com/rjeczalik/notify) -* [fsevents](https://github.com/fsnotify/fsevents) - diff --git a/vendor/github.com/fsnotify/fsnotify/fen.go b/vendor/github.com/fsnotify/fsnotify/fen.go deleted file mode 100644 index ced39cb881..0000000000 --- a/vendor/github.com/fsnotify/fsnotify/fen.go +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright 2010 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build solaris - -package fsnotify - -import ( - "errors" -) - -// Watcher watches a set of files, delivering events to a channel. -type Watcher struct { - Events chan Event - Errors chan error -} - -// NewWatcher establishes a new watcher with the underlying OS and begins waiting for events. -func NewWatcher() (*Watcher, error) { - return nil, errors.New("FEN based watcher not yet supported for fsnotify\n") -} - -// Close removes all watches and closes the events channel. 
-func (w *Watcher) Close() error { - return nil -} - -// Add starts watching the named file or directory (non-recursively). -func (w *Watcher) Add(name string) error { - return nil -} - -// Remove stops watching the the named file or directory (non-recursively). -func (w *Watcher) Remove(name string) error { - return nil -} diff --git a/vendor/github.com/fsnotify/fsnotify/fsnotify.go b/vendor/github.com/fsnotify/fsnotify/fsnotify.go deleted file mode 100644 index 190bf0de57..0000000000 --- a/vendor/github.com/fsnotify/fsnotify/fsnotify.go +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright 2012 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build !plan9 - -// Package fsnotify provides a platform-independent interface for file system notifications. -package fsnotify - -import ( - "bytes" - "errors" - "fmt" -) - -// Event represents a single file system notification. -type Event struct { - Name string // Relative path to the file or directory. - Op Op // File operation that triggered the event. -} - -// Op describes a set of file operations. -type Op uint32 - -// These are the generalized file operations that can trigger a notification. -const ( - Create Op = 1 << iota - Write - Remove - Rename - Chmod -) - -func (op Op) String() string { - // Use a buffer for efficient string concatenation - var buffer bytes.Buffer - - if op&Create == Create { - buffer.WriteString("|CREATE") - } - if op&Remove == Remove { - buffer.WriteString("|REMOVE") - } - if op&Write == Write { - buffer.WriteString("|WRITE") - } - if op&Rename == Rename { - buffer.WriteString("|RENAME") - } - if op&Chmod == Chmod { - buffer.WriteString("|CHMOD") - } - if buffer.Len() == 0 { - return "" - } - return buffer.String()[1:] // Strip leading pipe -} - -// String returns a string representation of the event in the form -// "file: REMOVE|WRITE|..." -func (e Event) String() string { - return fmt.Sprintf("%q: %s", e.Name, e.Op.String()) -} - -// Common errors that can be reported by a watcher -var ErrEventOverflow = errors.New("fsnotify queue overflow") diff --git a/vendor/github.com/fsnotify/fsnotify/inotify.go b/vendor/github.com/fsnotify/fsnotify/inotify.go deleted file mode 100644 index bfa9dbc3c7..0000000000 --- a/vendor/github.com/fsnotify/fsnotify/inotify.go +++ /dev/null @@ -1,334 +0,0 @@ -// Copyright 2010 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build linux - -package fsnotify - -import ( - "errors" - "fmt" - "io" - "os" - "path/filepath" - "strings" - "sync" - "unsafe" - - "golang.org/x/sys/unix" -) - -// Watcher watches a set of files, delivering events to a channel. -type Watcher struct { - Events chan Event - Errors chan error - mu sync.Mutex // Map access - cv *sync.Cond // sync removing on rm_watch with IN_IGNORE - fd int - poller *fdPoller - watches map[string]*watch // Map of inotify watches (key: path) - paths map[int]string // Map of watched paths (key: watch descriptor) - done chan struct{} // Channel for sending a "quit message" to the reader goroutine - doneResp chan struct{} // Channel to respond to Close -} - -// NewWatcher establishes a new watcher with the underlying OS and begins waiting for events. 
-func NewWatcher() (*Watcher, error) { - // Create inotify fd - fd, errno := unix.InotifyInit1(unix.IN_CLOEXEC) - if fd == -1 { - return nil, errno - } - // Create epoll - poller, err := newFdPoller(fd) - if err != nil { - unix.Close(fd) - return nil, err - } - w := &Watcher{ - fd: fd, - poller: poller, - watches: make(map[string]*watch), - paths: make(map[int]string), - Events: make(chan Event), - Errors: make(chan error), - done: make(chan struct{}), - doneResp: make(chan struct{}), - } - w.cv = sync.NewCond(&w.mu) - - go w.readEvents() - return w, nil -} - -func (w *Watcher) isClosed() bool { - select { - case <-w.done: - return true - default: - return false - } -} - -// Close removes all watches and closes the events channel. -func (w *Watcher) Close() error { - if w.isClosed() { - return nil - } - - // Send 'close' signal to goroutine, and set the Watcher to closed. - close(w.done) - - // Wake up goroutine - w.poller.wake() - - // Wait for goroutine to close - <-w.doneResp - - return nil -} - -// Add starts watching the named file or directory (non-recursively). -func (w *Watcher) Add(name string) error { - name = filepath.Clean(name) - if w.isClosed() { - return errors.New("inotify instance already closed") - } - - const agnosticEvents = unix.IN_MOVED_TO | unix.IN_MOVED_FROM | - unix.IN_CREATE | unix.IN_ATTRIB | unix.IN_MODIFY | - unix.IN_MOVE_SELF | unix.IN_DELETE | unix.IN_DELETE_SELF - - var flags uint32 = agnosticEvents - - w.mu.Lock() - watchEntry, found := w.watches[name] - w.mu.Unlock() - if found { - watchEntry.flags |= flags - flags |= unix.IN_MASK_ADD - } - wd, errno := unix.InotifyAddWatch(w.fd, name, flags) - if wd == -1 { - return errno - } - - w.mu.Lock() - w.watches[name] = &watch{wd: uint32(wd), flags: flags} - w.paths[wd] = name - w.mu.Unlock() - - return nil -} - -// Remove stops watching the named file or directory (non-recursively). -func (w *Watcher) Remove(name string) error { - name = filepath.Clean(name) - - // Fetch the watch. - w.mu.Lock() - defer w.mu.Unlock() - watch, ok := w.watches[name] - - // Remove it from inotify. - if !ok { - return fmt.Errorf("can't remove non-existent inotify watch for: %s", name) - } - // inotify_rm_watch will return EINVAL if the file has been deleted; - // the inotify will already have been removed. - // watches and pathes are deleted in ignoreLinux() implicitly and asynchronously - // by calling inotify_rm_watch() below. e.g. readEvents() goroutine receives IN_IGNORE - // so that EINVAL means that the wd is being rm_watch()ed or its file removed - // by another thread and we have not received IN_IGNORE event. - success, errno := unix.InotifyRmWatch(w.fd, watch.wd) - if success == -1 { - // TODO: Perhaps it's not helpful to return an error here in every case. - // the only two possible errors are: - // EBADF, which happens when w.fd is not a valid file descriptor of any kind. - // EINVAL, which is when fd is not an inotify descriptor or wd is not a valid watch descriptor. - // Watch descriptors are invalidated when they are removed explicitly or implicitly; - // explicitly by inotify_rm_watch, implicitly when the file they are watching is deleted. 
- return errno - } - - // wait until ignoreLinux() deleting maps - exists := true - for exists { - w.cv.Wait() - _, exists = w.watches[name] - } - - return nil -} - -type watch struct { - wd uint32 // Watch descriptor (as returned by the inotify_add_watch() syscall) - flags uint32 // inotify flags of this watch (see inotify(7) for the list of valid flags) -} - -// readEvents reads from the inotify file descriptor, converts the -// received events into Event objects and sends them via the Events channel -func (w *Watcher) readEvents() { - var ( - buf [unix.SizeofInotifyEvent * 4096]byte // Buffer for a maximum of 4096 raw events - n int // Number of bytes read with read() - errno error // Syscall errno - ok bool // For poller.wait - ) - - defer close(w.doneResp) - defer close(w.Errors) - defer close(w.Events) - defer unix.Close(w.fd) - defer w.poller.close() - - for { - // See if we have been closed. - if w.isClosed() { - return - } - - ok, errno = w.poller.wait() - if errno != nil { - select { - case w.Errors <- errno: - case <-w.done: - return - } - continue - } - - if !ok { - continue - } - - n, errno = unix.Read(w.fd, buf[:]) - // If a signal interrupted execution, see if we've been asked to close, and try again. - // http://man7.org/linux/man-pages/man7/signal.7.html : - // "Before Linux 3.8, reads from an inotify(7) file descriptor were not restartable" - if errno == unix.EINTR { - continue - } - - // unix.Read might have been woken up by Close. If so, we're done. - if w.isClosed() { - return - } - - if n < unix.SizeofInotifyEvent { - var err error - if n == 0 { - // If EOF is received. This should really never happen. - err = io.EOF - } else if n < 0 { - // If an error occurred while reading. - err = errno - } else { - // Read was too short. - err = errors.New("notify: short read in readEvents()") - } - select { - case w.Errors <- err: - case <-w.done: - return - } - continue - } - - var offset uint32 - // We don't know how many events we just read into the buffer - // While the offset points to at least one whole event... - for offset <= uint32(n-unix.SizeofInotifyEvent) { - // Point "raw" to the event in the buffer - raw := (*unix.InotifyEvent)(unsafe.Pointer(&buf[offset])) - - mask := uint32(raw.Mask) - nameLen := uint32(raw.Len) - - if mask&unix.IN_Q_OVERFLOW != 0 { - select { - case w.Errors <- ErrEventOverflow: - case <-w.done: - return - } - } - - // If the event happened to the watched directory or the watched file, the kernel - // doesn't append the filename to the event, but we would like to always fill the - // the "Name" field with a valid filename. We retrieve the path of the watch from - // the "paths" map. - w.mu.Lock() - name := w.paths[int(raw.Wd)] - w.mu.Unlock() - if nameLen > 0 { - // Point "bytes" at the first byte of the filename - bytes := (*[unix.PathMax]byte)(unsafe.Pointer(&buf[offset+unix.SizeofInotifyEvent])) - // The filename is padded with NULL bytes. TrimRight() gets rid of those. - name += "/" + strings.TrimRight(string(bytes[0:nameLen]), "\000") - } - - event := newEvent(name, mask) - - // Send the events that are not ignored on the events channel - if !event.ignoreLinux(w, raw.Wd, mask) { - select { - case w.Events <- event: - case <-w.done: - return - } - } - - // Move to the next event in the buffer - offset += unix.SizeofInotifyEvent + nameLen - } - } -} - -// Certain types of events can be "ignored" and not sent over the Events -// channel. Such as events marked ignore by the kernel, or MODIFY events -// against files that do not exist. 
-func (e *Event) ignoreLinux(w *Watcher, wd int32, mask uint32) bool { - // Ignore anything the inotify API says to ignore - if mask&unix.IN_IGNORED == unix.IN_IGNORED { - w.mu.Lock() - defer w.mu.Unlock() - name := w.paths[int(wd)] - delete(w.paths, int(wd)) - delete(w.watches, name) - w.cv.Broadcast() - return true - } - - // If the event is not a DELETE or RENAME, the file must exist. - // Otherwise the event is ignored. - // *Note*: this was put in place because it was seen that a MODIFY - // event was sent after the DELETE. This ignores that MODIFY and - // assumes a DELETE will come or has come if the file doesn't exist. - if !(e.Op&Remove == Remove || e.Op&Rename == Rename) { - _, statErr := os.Lstat(e.Name) - return os.IsNotExist(statErr) - } - return false -} - -// newEvent returns an platform-independent Event based on an inotify mask. -func newEvent(name string, mask uint32) Event { - e := Event{Name: name} - if mask&unix.IN_CREATE == unix.IN_CREATE || mask&unix.IN_MOVED_TO == unix.IN_MOVED_TO { - e.Op |= Create - } - if mask&unix.IN_DELETE_SELF == unix.IN_DELETE_SELF || mask&unix.IN_DELETE == unix.IN_DELETE { - e.Op |= Remove - } - if mask&unix.IN_MODIFY == unix.IN_MODIFY { - e.Op |= Write - } - if mask&unix.IN_MOVE_SELF == unix.IN_MOVE_SELF || mask&unix.IN_MOVED_FROM == unix.IN_MOVED_FROM { - e.Op |= Rename - } - if mask&unix.IN_ATTRIB == unix.IN_ATTRIB { - e.Op |= Chmod - } - return e -} diff --git a/vendor/github.com/fsnotify/fsnotify/inotify_poller.go b/vendor/github.com/fsnotify/fsnotify/inotify_poller.go deleted file mode 100644 index cc7db4b22e..0000000000 --- a/vendor/github.com/fsnotify/fsnotify/inotify_poller.go +++ /dev/null @@ -1,187 +0,0 @@ -// Copyright 2015 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build linux - -package fsnotify - -import ( - "errors" - - "golang.org/x/sys/unix" -) - -type fdPoller struct { - fd int // File descriptor (as returned by the inotify_init() syscall) - epfd int // Epoll file descriptor - pipe [2]int // Pipe for waking up -} - -func emptyPoller(fd int) *fdPoller { - poller := new(fdPoller) - poller.fd = fd - poller.epfd = -1 - poller.pipe[0] = -1 - poller.pipe[1] = -1 - return poller -} - -// Create a new inotify poller. -// This creates an inotify handler, and an epoll handler. -func newFdPoller(fd int) (*fdPoller, error) { - var errno error - poller := emptyPoller(fd) - defer func() { - if errno != nil { - poller.close() - } - }() - poller.fd = fd - - // Create epoll fd - poller.epfd, errno = unix.EpollCreate1(0) - if poller.epfd == -1 { - return nil, errno - } - // Create pipe; pipe[0] is the read end, pipe[1] the write end. - errno = unix.Pipe2(poller.pipe[:], unix.O_NONBLOCK) - if errno != nil { - return nil, errno - } - - // Register inotify fd with epoll - event := unix.EpollEvent{ - Fd: int32(poller.fd), - Events: unix.EPOLLIN, - } - errno = unix.EpollCtl(poller.epfd, unix.EPOLL_CTL_ADD, poller.fd, &event) - if errno != nil { - return nil, errno - } - - // Register pipe fd with epoll - event = unix.EpollEvent{ - Fd: int32(poller.pipe[0]), - Events: unix.EPOLLIN, - } - errno = unix.EpollCtl(poller.epfd, unix.EPOLL_CTL_ADD, poller.pipe[0], &event) - if errno != nil { - return nil, errno - } - - return poller, nil -} - -// Wait using epoll. -// Returns true if something is ready to be read, -// false if there is not. 
-func (poller *fdPoller) wait() (bool, error) { - // 3 possible events per fd, and 2 fds, makes a maximum of 6 events. - // I don't know whether epoll_wait returns the number of events returned, - // or the total number of events ready. - // I decided to catch both by making the buffer one larger than the maximum. - events := make([]unix.EpollEvent, 7) - for { - n, errno := unix.EpollWait(poller.epfd, events, -1) - if n == -1 { - if errno == unix.EINTR { - continue - } - return false, errno - } - if n == 0 { - // If there are no events, try again. - continue - } - if n > 6 { - // This should never happen. More events were returned than should be possible. - return false, errors.New("epoll_wait returned more events than I know what to do with") - } - ready := events[:n] - epollhup := false - epollerr := false - epollin := false - for _, event := range ready { - if event.Fd == int32(poller.fd) { - if event.Events&unix.EPOLLHUP != 0 { - // This should not happen, but if it does, treat it as a wakeup. - epollhup = true - } - if event.Events&unix.EPOLLERR != 0 { - // If an error is waiting on the file descriptor, we should pretend - // something is ready to read, and let unix.Read pick up the error. - epollerr = true - } - if event.Events&unix.EPOLLIN != 0 { - // There is data to read. - epollin = true - } - } - if event.Fd == int32(poller.pipe[0]) { - if event.Events&unix.EPOLLHUP != 0 { - // Write pipe descriptor was closed, by us. This means we're closing down the - // watcher, and we should wake up. - } - if event.Events&unix.EPOLLERR != 0 { - // If an error is waiting on the pipe file descriptor. - // This is an absolute mystery, and should never ever happen. - return false, errors.New("Error on the pipe descriptor.") - } - if event.Events&unix.EPOLLIN != 0 { - // This is a regular wakeup, so we have to clear the buffer. - err := poller.clearWake() - if err != nil { - return false, err - } - } - } - } - - if epollhup || epollerr || epollin { - return true, nil - } - return false, nil - } -} - -// Close the write end of the poller. -func (poller *fdPoller) wake() error { - buf := make([]byte, 1) - n, errno := unix.Write(poller.pipe[1], buf) - if n == -1 { - if errno == unix.EAGAIN { - // Buffer is full, poller will wake. - return nil - } - return errno - } - return nil -} - -func (poller *fdPoller) clearWake() error { - // You have to be woken up a LOT in order to get to 100! - buf := make([]byte, 100) - n, errno := unix.Read(poller.pipe[0], buf) - if n == -1 { - if errno == unix.EAGAIN { - // Buffer is empty, someone else cleared our wake. - return nil - } - return errno - } - return nil -} - -// Close all poller file descriptors, but not the one passed to it. -func (poller *fdPoller) close() { - if poller.pipe[1] != -1 { - unix.Close(poller.pipe[1]) - } - if poller.pipe[0] != -1 { - unix.Close(poller.pipe[0]) - } - if poller.epfd != -1 { - unix.Close(poller.epfd) - } -} diff --git a/vendor/github.com/fsnotify/fsnotify/kqueue.go b/vendor/github.com/fsnotify/fsnotify/kqueue.go deleted file mode 100644 index c2b4acb18d..0000000000 --- a/vendor/github.com/fsnotify/fsnotify/kqueue.go +++ /dev/null @@ -1,503 +0,0 @@ -// Copyright 2010 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -// +build freebsd openbsd netbsd dragonfly darwin - -package fsnotify - -import ( - "errors" - "fmt" - "io/ioutil" - "os" - "path/filepath" - "sync" - "time" - - "golang.org/x/sys/unix" -) - -// Watcher watches a set of files, delivering events to a channel. -type Watcher struct { - Events chan Event - Errors chan error - done chan bool // Channel for sending a "quit message" to the reader goroutine - - kq int // File descriptor (as returned by the kqueue() syscall). - - mu sync.Mutex // Protects access to watcher data - watches map[string]int // Map of watched file descriptors (key: path). - externalWatches map[string]bool // Map of watches added by user of the library. - dirFlags map[string]uint32 // Map of watched directories to fflags used in kqueue. - paths map[int]pathInfo // Map file descriptors to path names for processing kqueue events. - fileExists map[string]bool // Keep track of if we know this file exists (to stop duplicate create events). - isClosed bool // Set to true when Close() is first called -} - -type pathInfo struct { - name string - isDir bool -} - -// NewWatcher establishes a new watcher with the underlying OS and begins waiting for events. -func NewWatcher() (*Watcher, error) { - kq, err := kqueue() - if err != nil { - return nil, err - } - - w := &Watcher{ - kq: kq, - watches: make(map[string]int), - dirFlags: make(map[string]uint32), - paths: make(map[int]pathInfo), - fileExists: make(map[string]bool), - externalWatches: make(map[string]bool), - Events: make(chan Event), - Errors: make(chan error), - done: make(chan bool), - } - - go w.readEvents() - return w, nil -} - -// Close removes all watches and closes the events channel. -func (w *Watcher) Close() error { - w.mu.Lock() - if w.isClosed { - w.mu.Unlock() - return nil - } - w.isClosed = true - w.mu.Unlock() - - // copy paths to remove while locked - w.mu.Lock() - var pathsToRemove = make([]string, 0, len(w.watches)) - for name := range w.watches { - pathsToRemove = append(pathsToRemove, name) - } - w.mu.Unlock() - // unlock before calling Remove, which also locks - - var err error - for _, name := range pathsToRemove { - if e := w.Remove(name); e != nil && err == nil { - err = e - } - } - - // Send "quit" message to the reader goroutine: - w.done <- true - - return nil -} - -// Add starts watching the named file or directory (non-recursively). -func (w *Watcher) Add(name string) error { - w.mu.Lock() - w.externalWatches[name] = true - w.mu.Unlock() - _, err := w.addWatch(name, noteAllEvents) - return err -} - -// Remove stops watching the the named file or directory (non-recursively). -func (w *Watcher) Remove(name string) error { - name = filepath.Clean(name) - w.mu.Lock() - watchfd, ok := w.watches[name] - w.mu.Unlock() - if !ok { - return fmt.Errorf("can't remove non-existent kevent watch for: %s", name) - } - - const registerRemove = unix.EV_DELETE - if err := register(w.kq, []int{watchfd}, registerRemove, 0); err != nil { - return err - } - - unix.Close(watchfd) - - w.mu.Lock() - isDir := w.paths[watchfd].isDir - delete(w.watches, name) - delete(w.paths, watchfd) - delete(w.dirFlags, name) - w.mu.Unlock() - - // Find all watched paths that are in this directory that are not external. 
- if isDir { - var pathsToRemove []string - w.mu.Lock() - for _, path := range w.paths { - wdir, _ := filepath.Split(path.name) - if filepath.Clean(wdir) == name { - if !w.externalWatches[path.name] { - pathsToRemove = append(pathsToRemove, path.name) - } - } - } - w.mu.Unlock() - for _, name := range pathsToRemove { - // Since these are internal, not much sense in propagating error - // to the user, as that will just confuse them with an error about - // a path they did not explicitly watch themselves. - w.Remove(name) - } - } - - return nil -} - -// Watch all events (except NOTE_EXTEND, NOTE_LINK, NOTE_REVOKE) -const noteAllEvents = unix.NOTE_DELETE | unix.NOTE_WRITE | unix.NOTE_ATTRIB | unix.NOTE_RENAME - -// keventWaitTime to block on each read from kevent -var keventWaitTime = durationToTimespec(100 * time.Millisecond) - -// addWatch adds name to the watched file set. -// The flags are interpreted as described in kevent(2). -// Returns the real path to the file which was added, if any, which may be different from the one passed in the case of symlinks. -func (w *Watcher) addWatch(name string, flags uint32) (string, error) { - var isDir bool - // Make ./name and name equivalent - name = filepath.Clean(name) - - w.mu.Lock() - if w.isClosed { - w.mu.Unlock() - return "", errors.New("kevent instance already closed") - } - watchfd, alreadyWatching := w.watches[name] - // We already have a watch, but we can still override flags. - if alreadyWatching { - isDir = w.paths[watchfd].isDir - } - w.mu.Unlock() - - if !alreadyWatching { - fi, err := os.Lstat(name) - if err != nil { - return "", err - } - - // Don't watch sockets. - if fi.Mode()&os.ModeSocket == os.ModeSocket { - return "", nil - } - - // Don't watch named pipes. - if fi.Mode()&os.ModeNamedPipe == os.ModeNamedPipe { - return "", nil - } - - // Follow Symlinks - // Unfortunately, Linux can add bogus symlinks to watch list without - // issue, and Windows can't do symlinks period (AFAIK). To maintain - // consistency, we will act like everything is fine. There will simply - // be no file events for broken symlinks. - // Hence the returns of nil on errors. 
- if fi.Mode()&os.ModeSymlink == os.ModeSymlink { - name, err = filepath.EvalSymlinks(name) - if err != nil { - return "", nil - } - - w.mu.Lock() - _, alreadyWatching = w.watches[name] - w.mu.Unlock() - - if alreadyWatching { - return name, nil - } - - fi, err = os.Lstat(name) - if err != nil { - return "", nil - } - } - - watchfd, err = unix.Open(name, openMode, 0700) - if watchfd == -1 { - return "", err - } - - isDir = fi.IsDir() - } - - const registerAdd = unix.EV_ADD | unix.EV_CLEAR | unix.EV_ENABLE - if err := register(w.kq, []int{watchfd}, registerAdd, flags); err != nil { - unix.Close(watchfd) - return "", err - } - - if !alreadyWatching { - w.mu.Lock() - w.watches[name] = watchfd - w.paths[watchfd] = pathInfo{name: name, isDir: isDir} - w.mu.Unlock() - } - - if isDir { - // Watch the directory if it has not been watched before, - // or if it was watched before, but perhaps only a NOTE_DELETE (watchDirectoryFiles) - w.mu.Lock() - - watchDir := (flags&unix.NOTE_WRITE) == unix.NOTE_WRITE && - (!alreadyWatching || (w.dirFlags[name]&unix.NOTE_WRITE) != unix.NOTE_WRITE) - // Store flags so this watch can be updated later - w.dirFlags[name] = flags - w.mu.Unlock() - - if watchDir { - if err := w.watchDirectoryFiles(name); err != nil { - return "", err - } - } - } - return name, nil -} - -// readEvents reads from kqueue and converts the received kevents into -// Event values that it sends down the Events channel. -func (w *Watcher) readEvents() { - eventBuffer := make([]unix.Kevent_t, 10) - - for { - // See if there is a message on the "done" channel - select { - case <-w.done: - err := unix.Close(w.kq) - if err != nil { - w.Errors <- err - } - close(w.Events) - close(w.Errors) - return - default: - } - - // Get new events - kevents, err := read(w.kq, eventBuffer, &keventWaitTime) - // EINTR is okay, the syscall was interrupted before timeout expired. - if err != nil && err != unix.EINTR { - w.Errors <- err - continue - } - - // Flush the events we received to the Events channel - for len(kevents) > 0 { - kevent := &kevents[0] - watchfd := int(kevent.Ident) - mask := uint32(kevent.Fflags) - w.mu.Lock() - path := w.paths[watchfd] - w.mu.Unlock() - event := newEvent(path.name, mask) - - if path.isDir && !(event.Op&Remove == Remove) { - // Double check to make sure the directory exists. This can happen when - // we do a rm -fr on a recursively watched folders and we receive a - // modification event first but the folder has been deleted and later - // receive the delete event - if _, err := os.Lstat(event.Name); os.IsNotExist(err) { - // mark is as delete event - event.Op |= Remove - } - } - - if event.Op&Rename == Rename || event.Op&Remove == Remove { - w.Remove(event.Name) - w.mu.Lock() - delete(w.fileExists, event.Name) - w.mu.Unlock() - } - - if path.isDir && event.Op&Write == Write && !(event.Op&Remove == Remove) { - w.sendDirectoryChangeEvents(event.Name) - } else { - // Send the event on the Events channel - w.Events <- event - } - - if event.Op&Remove == Remove { - // Look for a file that may have overwritten this. - // For example, mv f1 f2 will delete f2, then create f2. - if path.isDir { - fileDir := filepath.Clean(event.Name) - w.mu.Lock() - _, found := w.watches[fileDir] - w.mu.Unlock() - if found { - // make sure the directory exists before we watch for changes. When we - // do a recursive watch and perform rm -fr, the parent directory might - // have gone missing, ignore the missing directory and let the - // upcoming delete event remove the watch from the parent directory. 
- if _, err := os.Lstat(fileDir); err == nil { - w.sendDirectoryChangeEvents(fileDir) - } - } - } else { - filePath := filepath.Clean(event.Name) - if fileInfo, err := os.Lstat(filePath); err == nil { - w.sendFileCreatedEventIfNew(filePath, fileInfo) - } - } - } - - // Move to next event - kevents = kevents[1:] - } - } -} - -// newEvent returns an platform-independent Event based on kqueue Fflags. -func newEvent(name string, mask uint32) Event { - e := Event{Name: name} - if mask&unix.NOTE_DELETE == unix.NOTE_DELETE { - e.Op |= Remove - } - if mask&unix.NOTE_WRITE == unix.NOTE_WRITE { - e.Op |= Write - } - if mask&unix.NOTE_RENAME == unix.NOTE_RENAME { - e.Op |= Rename - } - if mask&unix.NOTE_ATTRIB == unix.NOTE_ATTRIB { - e.Op |= Chmod - } - return e -} - -func newCreateEvent(name string) Event { - return Event{Name: name, Op: Create} -} - -// watchDirectoryFiles to mimic inotify when adding a watch on a directory -func (w *Watcher) watchDirectoryFiles(dirPath string) error { - // Get all files - files, err := ioutil.ReadDir(dirPath) - if err != nil { - return err - } - - for _, fileInfo := range files { - filePath := filepath.Join(dirPath, fileInfo.Name()) - filePath, err = w.internalWatch(filePath, fileInfo) - if err != nil { - return err - } - - w.mu.Lock() - w.fileExists[filePath] = true - w.mu.Unlock() - } - - return nil -} - -// sendDirectoryEvents searches the directory for newly created files -// and sends them over the event channel. This functionality is to have -// the BSD version of fsnotify match Linux inotify which provides a -// create event for files created in a watched directory. -func (w *Watcher) sendDirectoryChangeEvents(dirPath string) { - // Get all files - files, err := ioutil.ReadDir(dirPath) - if err != nil { - w.Errors <- err - } - - // Search for new files - for _, fileInfo := range files { - filePath := filepath.Join(dirPath, fileInfo.Name()) - err := w.sendFileCreatedEventIfNew(filePath, fileInfo) - - if err != nil { - return - } - } -} - -// sendFileCreatedEvent sends a create event if the file isn't already being tracked. -func (w *Watcher) sendFileCreatedEventIfNew(filePath string, fileInfo os.FileInfo) (err error) { - w.mu.Lock() - _, doesExist := w.fileExists[filePath] - w.mu.Unlock() - if !doesExist { - // Send create event - w.Events <- newCreateEvent(filePath) - } - - // like watchDirectoryFiles (but without doing another ReadDir) - filePath, err = w.internalWatch(filePath, fileInfo) - if err != nil { - return err - } - - w.mu.Lock() - w.fileExists[filePath] = true - w.mu.Unlock() - - return nil -} - -func (w *Watcher) internalWatch(name string, fileInfo os.FileInfo) (string, error) { - if fileInfo.IsDir() { - // mimic Linux providing delete events for subdirectories - // but preserve the flags used if currently watching subdirectory - w.mu.Lock() - flags := w.dirFlags[name] - w.mu.Unlock() - - flags |= unix.NOTE_DELETE | unix.NOTE_RENAME - return w.addWatch(name, flags) - } - - // watch file to mimic Linux inotify - return w.addWatch(name, noteAllEvents) -} - -// kqueue creates a new kernel event queue and returns a descriptor. 
-func kqueue() (kq int, err error) { - kq, err = unix.Kqueue() - if kq == -1 { - return kq, err - } - return kq, nil -} - -// register events with the queue -func register(kq int, fds []int, flags int, fflags uint32) error { - changes := make([]unix.Kevent_t, len(fds)) - - for i, fd := range fds { - // SetKevent converts int to the platform-specific types: - unix.SetKevent(&changes[i], fd, unix.EVFILT_VNODE, flags) - changes[i].Fflags = fflags - } - - // register the events - success, err := unix.Kevent(kq, changes, nil, nil) - if success == -1 { - return err - } - return nil -} - -// read retrieves pending events, or waits until an event occurs. -// A timeout of nil blocks indefinitely, while 0 polls the queue. -func read(kq int, events []unix.Kevent_t, timeout *unix.Timespec) ([]unix.Kevent_t, error) { - n, err := unix.Kevent(kq, nil, events, timeout) - if err != nil { - return nil, err - } - return events[0:n], nil -} - -// durationToTimespec prepares a timeout value -func durationToTimespec(d time.Duration) unix.Timespec { - return unix.NsecToTimespec(d.Nanoseconds()) -} diff --git a/vendor/github.com/fsnotify/fsnotify/open_mode_bsd.go b/vendor/github.com/fsnotify/fsnotify/open_mode_bsd.go deleted file mode 100644 index 7d8de14513..0000000000 --- a/vendor/github.com/fsnotify/fsnotify/open_mode_bsd.go +++ /dev/null @@ -1,11 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build freebsd openbsd netbsd dragonfly - -package fsnotify - -import "golang.org/x/sys/unix" - -const openMode = unix.O_NONBLOCK | unix.O_RDONLY diff --git a/vendor/github.com/fsnotify/fsnotify/open_mode_darwin.go b/vendor/github.com/fsnotify/fsnotify/open_mode_darwin.go deleted file mode 100644 index 9139e17161..0000000000 --- a/vendor/github.com/fsnotify/fsnotify/open_mode_darwin.go +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build darwin - -package fsnotify - -import "golang.org/x/sys/unix" - -// note: this constant is not defined on BSD -const openMode = unix.O_EVTONLY diff --git a/vendor/github.com/fsnotify/fsnotify/windows.go b/vendor/github.com/fsnotify/fsnotify/windows.go deleted file mode 100644 index 09436f31d8..0000000000 --- a/vendor/github.com/fsnotify/fsnotify/windows.go +++ /dev/null @@ -1,561 +0,0 @@ -// Copyright 2011 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build windows - -package fsnotify - -import ( - "errors" - "fmt" - "os" - "path/filepath" - "runtime" - "sync" - "syscall" - "unsafe" -) - -// Watcher watches a set of files, delivering events to a channel. -type Watcher struct { - Events chan Event - Errors chan error - isClosed bool // Set to true when Close() is first called - mu sync.Mutex // Map access - port syscall.Handle // Handle to completion port - watches watchMap // Map of watches (key: i-number) - input chan *input // Inputs to the reader are sent on this channel - quit chan chan<- error -} - -// NewWatcher establishes a new watcher with the underlying OS and begins waiting for events. 
-func NewWatcher() (*Watcher, error) { - port, e := syscall.CreateIoCompletionPort(syscall.InvalidHandle, 0, 0, 0) - if e != nil { - return nil, os.NewSyscallError("CreateIoCompletionPort", e) - } - w := &Watcher{ - port: port, - watches: make(watchMap), - input: make(chan *input, 1), - Events: make(chan Event, 50), - Errors: make(chan error), - quit: make(chan chan<- error, 1), - } - go w.readEvents() - return w, nil -} - -// Close removes all watches and closes the events channel. -func (w *Watcher) Close() error { - if w.isClosed { - return nil - } - w.isClosed = true - - // Send "quit" message to the reader goroutine - ch := make(chan error) - w.quit <- ch - if err := w.wakeupReader(); err != nil { - return err - } - return <-ch -} - -// Add starts watching the named file or directory (non-recursively). -func (w *Watcher) Add(name string) error { - if w.isClosed { - return errors.New("watcher already closed") - } - in := &input{ - op: opAddWatch, - path: filepath.Clean(name), - flags: sysFSALLEVENTS, - reply: make(chan error), - } - w.input <- in - if err := w.wakeupReader(); err != nil { - return err - } - return <-in.reply -} - -// Remove stops watching the the named file or directory (non-recursively). -func (w *Watcher) Remove(name string) error { - in := &input{ - op: opRemoveWatch, - path: filepath.Clean(name), - reply: make(chan error), - } - w.input <- in - if err := w.wakeupReader(); err != nil { - return err - } - return <-in.reply -} - -const ( - // Options for AddWatch - sysFSONESHOT = 0x80000000 - sysFSONLYDIR = 0x1000000 - - // Events - sysFSACCESS = 0x1 - sysFSALLEVENTS = 0xfff - sysFSATTRIB = 0x4 - sysFSCLOSE = 0x18 - sysFSCREATE = 0x100 - sysFSDELETE = 0x200 - sysFSDELETESELF = 0x400 - sysFSMODIFY = 0x2 - sysFSMOVE = 0xc0 - sysFSMOVEDFROM = 0x40 - sysFSMOVEDTO = 0x80 - sysFSMOVESELF = 0x800 - - // Special events - sysFSIGNORED = 0x8000 - sysFSQOVERFLOW = 0x4000 -) - -func newEvent(name string, mask uint32) Event { - e := Event{Name: name} - if mask&sysFSCREATE == sysFSCREATE || mask&sysFSMOVEDTO == sysFSMOVEDTO { - e.Op |= Create - } - if mask&sysFSDELETE == sysFSDELETE || mask&sysFSDELETESELF == sysFSDELETESELF { - e.Op |= Remove - } - if mask&sysFSMODIFY == sysFSMODIFY { - e.Op |= Write - } - if mask&sysFSMOVE == sysFSMOVE || mask&sysFSMOVESELF == sysFSMOVESELF || mask&sysFSMOVEDFROM == sysFSMOVEDFROM { - e.Op |= Rename - } - if mask&sysFSATTRIB == sysFSATTRIB { - e.Op |= Chmod - } - return e -} - -const ( - opAddWatch = iota - opRemoveWatch -) - -const ( - provisional uint64 = 1 << (32 + iota) -) - -type input struct { - op int - path string - flags uint32 - reply chan error -} - -type inode struct { - handle syscall.Handle - volume uint32 - index uint64 -} - -type watch struct { - ov syscall.Overlapped - ino *inode // i-number - path string // Directory path - mask uint64 // Directory itself is being watched with these notify flags - names map[string]uint64 // Map of names being watched and their notify flags - rename string // Remembers the old name while renaming a file - buf [4096]byte -} - -type indexMap map[uint64]*watch -type watchMap map[uint32]indexMap - -func (w *Watcher) wakeupReader() error { - e := syscall.PostQueuedCompletionStatus(w.port, 0, 0, nil) - if e != nil { - return os.NewSyscallError("PostQueuedCompletionStatus", e) - } - return nil -} - -func getDir(pathname string) (dir string, err error) { - attr, e := syscall.GetFileAttributes(syscall.StringToUTF16Ptr(pathname)) - if e != nil { - return "", os.NewSyscallError("GetFileAttributes", e) - } - 
if attr&syscall.FILE_ATTRIBUTE_DIRECTORY != 0 { - dir = pathname - } else { - dir, _ = filepath.Split(pathname) - dir = filepath.Clean(dir) - } - return -} - -func getIno(path string) (ino *inode, err error) { - h, e := syscall.CreateFile(syscall.StringToUTF16Ptr(path), - syscall.FILE_LIST_DIRECTORY, - syscall.FILE_SHARE_READ|syscall.FILE_SHARE_WRITE|syscall.FILE_SHARE_DELETE, - nil, syscall.OPEN_EXISTING, - syscall.FILE_FLAG_BACKUP_SEMANTICS|syscall.FILE_FLAG_OVERLAPPED, 0) - if e != nil { - return nil, os.NewSyscallError("CreateFile", e) - } - var fi syscall.ByHandleFileInformation - if e = syscall.GetFileInformationByHandle(h, &fi); e != nil { - syscall.CloseHandle(h) - return nil, os.NewSyscallError("GetFileInformationByHandle", e) - } - ino = &inode{ - handle: h, - volume: fi.VolumeSerialNumber, - index: uint64(fi.FileIndexHigh)<<32 | uint64(fi.FileIndexLow), - } - return ino, nil -} - -// Must run within the I/O thread. -func (m watchMap) get(ino *inode) *watch { - if i := m[ino.volume]; i != nil { - return i[ino.index] - } - return nil -} - -// Must run within the I/O thread. -func (m watchMap) set(ino *inode, watch *watch) { - i := m[ino.volume] - if i == nil { - i = make(indexMap) - m[ino.volume] = i - } - i[ino.index] = watch -} - -// Must run within the I/O thread. -func (w *Watcher) addWatch(pathname string, flags uint64) error { - dir, err := getDir(pathname) - if err != nil { - return err - } - if flags&sysFSONLYDIR != 0 && pathname != dir { - return nil - } - ino, err := getIno(dir) - if err != nil { - return err - } - w.mu.Lock() - watchEntry := w.watches.get(ino) - w.mu.Unlock() - if watchEntry == nil { - if _, e := syscall.CreateIoCompletionPort(ino.handle, w.port, 0, 0); e != nil { - syscall.CloseHandle(ino.handle) - return os.NewSyscallError("CreateIoCompletionPort", e) - } - watchEntry = &watch{ - ino: ino, - path: dir, - names: make(map[string]uint64), - } - w.mu.Lock() - w.watches.set(ino, watchEntry) - w.mu.Unlock() - flags |= provisional - } else { - syscall.CloseHandle(ino.handle) - } - if pathname == dir { - watchEntry.mask |= flags - } else { - watchEntry.names[filepath.Base(pathname)] |= flags - } - if err = w.startRead(watchEntry); err != nil { - return err - } - if pathname == dir { - watchEntry.mask &= ^provisional - } else { - watchEntry.names[filepath.Base(pathname)] &= ^provisional - } - return nil -} - -// Must run within the I/O thread. -func (w *Watcher) remWatch(pathname string) error { - dir, err := getDir(pathname) - if err != nil { - return err - } - ino, err := getIno(dir) - if err != nil { - return err - } - w.mu.Lock() - watch := w.watches.get(ino) - w.mu.Unlock() - if watch == nil { - return fmt.Errorf("can't remove non-existent watch for: %s", pathname) - } - if pathname == dir { - w.sendEvent(watch.path, watch.mask&sysFSIGNORED) - watch.mask = 0 - } else { - name := filepath.Base(pathname) - w.sendEvent(filepath.Join(watch.path, name), watch.names[name]&sysFSIGNORED) - delete(watch.names, name) - } - return w.startRead(watch) -} - -// Must run within the I/O thread. -func (w *Watcher) deleteWatch(watch *watch) { - for name, mask := range watch.names { - if mask&provisional == 0 { - w.sendEvent(filepath.Join(watch.path, name), mask&sysFSIGNORED) - } - delete(watch.names, name) - } - if watch.mask != 0 { - if watch.mask&provisional == 0 { - w.sendEvent(watch.path, watch.mask&sysFSIGNORED) - } - watch.mask = 0 - } -} - -// Must run within the I/O thread. 
-func (w *Watcher) startRead(watch *watch) error { - if e := syscall.CancelIo(watch.ino.handle); e != nil { - w.Errors <- os.NewSyscallError("CancelIo", e) - w.deleteWatch(watch) - } - mask := toWindowsFlags(watch.mask) - for _, m := range watch.names { - mask |= toWindowsFlags(m) - } - if mask == 0 { - if e := syscall.CloseHandle(watch.ino.handle); e != nil { - w.Errors <- os.NewSyscallError("CloseHandle", e) - } - w.mu.Lock() - delete(w.watches[watch.ino.volume], watch.ino.index) - w.mu.Unlock() - return nil - } - e := syscall.ReadDirectoryChanges(watch.ino.handle, &watch.buf[0], - uint32(unsafe.Sizeof(watch.buf)), false, mask, nil, &watch.ov, 0) - if e != nil { - err := os.NewSyscallError("ReadDirectoryChanges", e) - if e == syscall.ERROR_ACCESS_DENIED && watch.mask&provisional == 0 { - // Watched directory was probably removed - if w.sendEvent(watch.path, watch.mask&sysFSDELETESELF) { - if watch.mask&sysFSONESHOT != 0 { - watch.mask = 0 - } - } - err = nil - } - w.deleteWatch(watch) - w.startRead(watch) - return err - } - return nil -} - -// readEvents reads from the I/O completion port, converts the -// received events into Event objects and sends them via the Events channel. -// Entry point to the I/O thread. -func (w *Watcher) readEvents() { - var ( - n, key uint32 - ov *syscall.Overlapped - ) - runtime.LockOSThread() - - for { - e := syscall.GetQueuedCompletionStatus(w.port, &n, &key, &ov, syscall.INFINITE) - watch := (*watch)(unsafe.Pointer(ov)) - - if watch == nil { - select { - case ch := <-w.quit: - w.mu.Lock() - var indexes []indexMap - for _, index := range w.watches { - indexes = append(indexes, index) - } - w.mu.Unlock() - for _, index := range indexes { - for _, watch := range index { - w.deleteWatch(watch) - w.startRead(watch) - } - } - var err error - if e := syscall.CloseHandle(w.port); e != nil { - err = os.NewSyscallError("CloseHandle", e) - } - close(w.Events) - close(w.Errors) - ch <- err - return - case in := <-w.input: - switch in.op { - case opAddWatch: - in.reply <- w.addWatch(in.path, uint64(in.flags)) - case opRemoveWatch: - in.reply <- w.remWatch(in.path) - } - default: - } - continue - } - - switch e { - case syscall.ERROR_MORE_DATA: - if watch == nil { - w.Errors <- errors.New("ERROR_MORE_DATA has unexpectedly null lpOverlapped buffer") - } else { - // The i/o succeeded but the buffer is full. - // In theory we should be building up a full packet. - // In practice we can get away with just carrying on. 
- n = uint32(unsafe.Sizeof(watch.buf)) - } - case syscall.ERROR_ACCESS_DENIED: - // Watched directory was probably removed - w.sendEvent(watch.path, watch.mask&sysFSDELETESELF) - w.deleteWatch(watch) - w.startRead(watch) - continue - case syscall.ERROR_OPERATION_ABORTED: - // CancelIo was called on this handle - continue - default: - w.Errors <- os.NewSyscallError("GetQueuedCompletionPort", e) - continue - case nil: - } - - var offset uint32 - for { - if n == 0 { - w.Events <- newEvent("", sysFSQOVERFLOW) - w.Errors <- errors.New("short read in readEvents()") - break - } - - // Point "raw" to the event in the buffer - raw := (*syscall.FileNotifyInformation)(unsafe.Pointer(&watch.buf[offset])) - buf := (*[syscall.MAX_PATH]uint16)(unsafe.Pointer(&raw.FileName)) - name := syscall.UTF16ToString(buf[:raw.FileNameLength/2]) - fullname := filepath.Join(watch.path, name) - - var mask uint64 - switch raw.Action { - case syscall.FILE_ACTION_REMOVED: - mask = sysFSDELETESELF - case syscall.FILE_ACTION_MODIFIED: - mask = sysFSMODIFY - case syscall.FILE_ACTION_RENAMED_OLD_NAME: - watch.rename = name - case syscall.FILE_ACTION_RENAMED_NEW_NAME: - if watch.names[watch.rename] != 0 { - watch.names[name] |= watch.names[watch.rename] - delete(watch.names, watch.rename) - mask = sysFSMOVESELF - } - } - - sendNameEvent := func() { - if w.sendEvent(fullname, watch.names[name]&mask) { - if watch.names[name]&sysFSONESHOT != 0 { - delete(watch.names, name) - } - } - } - if raw.Action != syscall.FILE_ACTION_RENAMED_NEW_NAME { - sendNameEvent() - } - if raw.Action == syscall.FILE_ACTION_REMOVED { - w.sendEvent(fullname, watch.names[name]&sysFSIGNORED) - delete(watch.names, name) - } - if w.sendEvent(fullname, watch.mask&toFSnotifyFlags(raw.Action)) { - if watch.mask&sysFSONESHOT != 0 { - watch.mask = 0 - } - } - if raw.Action == syscall.FILE_ACTION_RENAMED_NEW_NAME { - fullname = filepath.Join(watch.path, watch.rename) - sendNameEvent() - } - - // Move to the next event in the buffer - if raw.NextEntryOffset == 0 { - break - } - offset += raw.NextEntryOffset - - // Error! 
- if offset >= n { - w.Errors <- errors.New("Windows system assumed buffer larger than it is, events have likely been missed.") - break - } - } - - if err := w.startRead(watch); err != nil { - w.Errors <- err - } - } -} - -func (w *Watcher) sendEvent(name string, mask uint64) bool { - if mask == 0 { - return false - } - event := newEvent(name, uint32(mask)) - select { - case ch := <-w.quit: - w.quit <- ch - case w.Events <- event: - } - return true -} - -func toWindowsFlags(mask uint64) uint32 { - var m uint32 - if mask&sysFSACCESS != 0 { - m |= syscall.FILE_NOTIFY_CHANGE_LAST_ACCESS - } - if mask&sysFSMODIFY != 0 { - m |= syscall.FILE_NOTIFY_CHANGE_LAST_WRITE - } - if mask&sysFSATTRIB != 0 { - m |= syscall.FILE_NOTIFY_CHANGE_ATTRIBUTES - } - if mask&(sysFSMOVE|sysFSCREATE|sysFSDELETE) != 0 { - m |= syscall.FILE_NOTIFY_CHANGE_FILE_NAME | syscall.FILE_NOTIFY_CHANGE_DIR_NAME - } - return m -} - -func toFSnotifyFlags(action uint32) uint64 { - switch action { - case syscall.FILE_ACTION_ADDED: - return sysFSCREATE - case syscall.FILE_ACTION_REMOVED: - return sysFSDELETE - case syscall.FILE_ACTION_MODIFIED: - return sysFSMODIFY - case syscall.FILE_ACTION_RENAMED_OLD_NAME: - return sysFSMOVEDFROM - case syscall.FILE_ACTION_RENAMED_NEW_NAME: - return sysFSMOVEDTO - } - return 0 -} diff --git a/vendor/github.com/hashicorp/hcl/LICENSE b/vendor/github.com/hashicorp/hcl/LICENSE deleted file mode 100644 index c33dcc7c92..0000000000 --- a/vendor/github.com/hashicorp/hcl/LICENSE +++ /dev/null @@ -1,354 +0,0 @@ -Mozilla Public License, version 2.0 - -1. Definitions - -1.1. “Contributor” - - means each individual or legal entity that creates, contributes to the - creation of, or owns Covered Software. - -1.2. “Contributor Version” - - means the combination of the Contributions of others (if any) used by a - Contributor and that particular Contributor’s Contribution. - -1.3. “Contribution” - - means Covered Software of a particular Contributor. - -1.4. “Covered Software” - - means Source Code Form to which the initial Contributor has attached the - notice in Exhibit A, the Executable Form of such Source Code Form, and - Modifications of such Source Code Form, in each case including portions - thereof. - -1.5. “Incompatible With Secondary Licenses” - means - - a. that the initial Contributor has attached the notice described in - Exhibit B to the Covered Software; or - - b. that the Covered Software was made available under the terms of version - 1.1 or earlier of the License, but not also under the terms of a - Secondary License. - -1.6. “Executable Form” - - means any form of the work other than Source Code Form. - -1.7. “Larger Work” - - means a work that combines Covered Software with other material, in a separate - file or files, that is not Covered Software. - -1.8. “License” - - means this document. - -1.9. “Licensable” - - means having the right to grant, to the maximum extent possible, whether at the - time of the initial grant or subsequently, any and all of the rights conveyed by - this License. - -1.10. “Modifications” - - means any of the following: - - a. any file in Source Code Form that results from an addition to, deletion - from, or modification of the contents of Covered Software; or - - b. any new file in Source Code Form that contains any Covered Software. - -1.11. 
“Patent Claims” of a Contributor - - means any patent claim(s), including without limitation, method, process, - and apparatus claims, in any patent Licensable by such Contributor that - would be infringed, but for the grant of the License, by the making, - using, selling, offering for sale, having made, import, or transfer of - either its Contributions or its Contributor Version. - -1.12. “Secondary License” - - means either the GNU General Public License, Version 2.0, the GNU Lesser - General Public License, Version 2.1, the GNU Affero General Public - License, Version 3.0, or any later versions of those licenses. - -1.13. “Source Code Form” - - means the form of the work preferred for making modifications. - -1.14. “You” (or “Your”) - - means an individual or a legal entity exercising rights under this - License. For legal entities, “You” includes any entity that controls, is - controlled by, or is under common control with You. For purposes of this - definition, “control” means (a) the power, direct or indirect, to cause - the direction or management of such entity, whether by contract or - otherwise, or (b) ownership of more than fifty percent (50%) of the - outstanding shares or beneficial ownership of such entity. - - -2. License Grants and Conditions - -2.1. Grants - - Each Contributor hereby grants You a world-wide, royalty-free, - non-exclusive license: - - a. under intellectual property rights (other than patent or trademark) - Licensable by such Contributor to use, reproduce, make available, - modify, display, perform, distribute, and otherwise exploit its - Contributions, either on an unmodified basis, with Modifications, or as - part of a Larger Work; and - - b. under Patent Claims of such Contributor to make, use, sell, offer for - sale, have made, import, and otherwise transfer either its Contributions - or its Contributor Version. - -2.2. Effective Date - - The licenses granted in Section 2.1 with respect to any Contribution become - effective for each Contribution on the date the Contributor first distributes - such Contribution. - -2.3. Limitations on Grant Scope - - The licenses granted in this Section 2 are the only rights granted under this - License. No additional rights or licenses will be implied from the distribution - or licensing of Covered Software under this License. Notwithstanding Section - 2.1(b) above, no patent license is granted by a Contributor: - - a. for any code that a Contributor has removed from Covered Software; or - - b. for infringements caused by: (i) Your and any other third party’s - modifications of Covered Software, or (ii) the combination of its - Contributions with other software (except as part of its Contributor - Version); or - - c. under Patent Claims infringed by Covered Software in the absence of its - Contributions. - - This License does not grant any rights in the trademarks, service marks, or - logos of any Contributor (except as may be necessary to comply with the - notice requirements in Section 3.4). - -2.4. Subsequent Licenses - - No Contributor makes additional grants as a result of Your choice to - distribute the Covered Software under a subsequent version of this License - (see Section 10.2) or under the terms of a Secondary License (if permitted - under the terms of Section 3.3). - -2.5. Representation - - Each Contributor represents that the Contributor believes its Contributions - are its original creation(s) or it has sufficient rights to grant the - rights to its Contributions conveyed by this License. - -2.6. 
Fair Use - - This License is not intended to limit any rights You have under applicable - copyright doctrines of fair use, fair dealing, or other equivalents. - -2.7. Conditions - - Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in - Section 2.1. - - -3. Responsibilities - -3.1. Distribution of Source Form - - All distribution of Covered Software in Source Code Form, including any - Modifications that You create or to which You contribute, must be under the - terms of this License. You must inform recipients that the Source Code Form - of the Covered Software is governed by the terms of this License, and how - they can obtain a copy of this License. You may not attempt to alter or - restrict the recipients’ rights in the Source Code Form. - -3.2. Distribution of Executable Form - - If You distribute Covered Software in Executable Form then: - - a. such Covered Software must also be made available in Source Code Form, - as described in Section 3.1, and You must inform recipients of the - Executable Form how they can obtain a copy of such Source Code Form by - reasonable means in a timely manner, at a charge no more than the cost - of distribution to the recipient; and - - b. You may distribute such Executable Form under the terms of this License, - or sublicense it under different terms, provided that the license for - the Executable Form does not attempt to limit or alter the recipients’ - rights in the Source Code Form under this License. - -3.3. Distribution of a Larger Work - - You may create and distribute a Larger Work under terms of Your choice, - provided that You also comply with the requirements of this License for the - Covered Software. If the Larger Work is a combination of Covered Software - with a work governed by one or more Secondary Licenses, and the Covered - Software is not Incompatible With Secondary Licenses, this License permits - You to additionally distribute such Covered Software under the terms of - such Secondary License(s), so that the recipient of the Larger Work may, at - their option, further distribute the Covered Software under the terms of - either this License or such Secondary License(s). - -3.4. Notices - - You may not remove or alter the substance of any license notices (including - copyright notices, patent notices, disclaimers of warranty, or limitations - of liability) contained within the Source Code Form of the Covered - Software, except that You may alter any license notices to the extent - required to remedy known factual inaccuracies. - -3.5. Application of Additional Terms - - You may choose to offer, and to charge a fee for, warranty, support, - indemnity or liability obligations to one or more recipients of Covered - Software. However, You may do so only on Your own behalf, and not on behalf - of any Contributor. You must make it absolutely clear that any such - warranty, support, indemnity, or liability obligation is offered by You - alone, and You hereby agree to indemnify every Contributor for any - liability incurred by such Contributor as a result of warranty, support, - indemnity or liability terms You offer. You may include additional - disclaimers of warranty and limitations of liability specific to any - jurisdiction. - -4. 
Inability to Comply Due to Statute or Regulation - - If it is impossible for You to comply with any of the terms of this License - with respect to some or all of the Covered Software due to statute, judicial - order, or regulation then You must: (a) comply with the terms of this License - to the maximum extent possible; and (b) describe the limitations and the code - they affect. Such description must be placed in a text file included with all - distributions of the Covered Software under this License. Except to the - extent prohibited by statute or regulation, such description must be - sufficiently detailed for a recipient of ordinary skill to be able to - understand it. - -5. Termination - -5.1. The rights granted under this License will terminate automatically if You - fail to comply with any of its terms. However, if You become compliant, - then the rights granted under this License from a particular Contributor - are reinstated (a) provisionally, unless and until such Contributor - explicitly and finally terminates Your grants, and (b) on an ongoing basis, - if such Contributor fails to notify You of the non-compliance by some - reasonable means prior to 60 days after You have come back into compliance. - Moreover, Your grants from a particular Contributor are reinstated on an - ongoing basis if such Contributor notifies You of the non-compliance by - some reasonable means, this is the first time You have received notice of - non-compliance with this License from such Contributor, and You become - compliant prior to 30 days after Your receipt of the notice. - -5.2. If You initiate litigation against any entity by asserting a patent - infringement claim (excluding declaratory judgment actions, counter-claims, - and cross-claims) alleging that a Contributor Version directly or - indirectly infringes any patent, then the rights granted to You by any and - all Contributors for the Covered Software under Section 2.1 of this License - shall terminate. - -5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user - license agreements (excluding distributors and resellers) which have been - validly granted by You or Your distributors under this License prior to - termination shall survive termination. - -6. Disclaimer of Warranty - - Covered Software is provided under this License on an “as is” basis, without - warranty of any kind, either expressed, implied, or statutory, including, - without limitation, warranties that the Covered Software is free of defects, - merchantable, fit for a particular purpose or non-infringing. The entire - risk as to the quality and performance of the Covered Software is with You. - Should any Covered Software prove defective in any respect, You (not any - Contributor) assume the cost of any necessary servicing, repair, or - correction. This disclaimer of warranty constitutes an essential part of this - License. No use of any Covered Software is authorized under this License - except under this disclaimer. - -7. 
Limitation of Liability - - Under no circumstances and under no legal theory, whether tort (including - negligence), contract, or otherwise, shall any Contributor, or anyone who - distributes Covered Software as permitted above, be liable to You for any - direct, indirect, special, incidental, or consequential damages of any - character including, without limitation, damages for lost profits, loss of - goodwill, work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses, even if such party shall have been - informed of the possibility of such damages. This limitation of liability - shall not apply to liability for death or personal injury resulting from such - party’s negligence to the extent applicable law prohibits such limitation. - Some jurisdictions do not allow the exclusion or limitation of incidental or - consequential damages, so this exclusion and limitation may not apply to You. - -8. Litigation - - Any litigation relating to this License may be brought only in the courts of - a jurisdiction where the defendant maintains its principal place of business - and such litigation shall be governed by laws of that jurisdiction, without - reference to its conflict-of-law provisions. Nothing in this Section shall - prevent a party’s ability to bring cross-claims or counter-claims. - -9. Miscellaneous - - This License represents the complete agreement concerning the subject matter - hereof. If any provision of this License is held to be unenforceable, such - provision shall be reformed only to the extent necessary to make it - enforceable. Any law or regulation which provides that the language of a - contract shall be construed against the drafter shall not be used to construe - this License against a Contributor. - - -10. Versions of the License - -10.1. New Versions - - Mozilla Foundation is the license steward. Except as provided in Section - 10.3, no one other than the license steward has the right to modify or - publish new versions of this License. Each version will be given a - distinguishing version number. - -10.2. Effect of New Versions - - You may distribute the Covered Software under the terms of the version of - the License under which You originally received the Covered Software, or - under the terms of any subsequent version published by the license - steward. - -10.3. Modified Versions - - If you create software not governed by this License, and you want to - create a new license for such software, you may create and use a modified - version of this License if you rename the license and remove any - references to the name of the license steward (except to note that such - modified license differs from this License). - -10.4. Distributing Source Code Form that is Incompatible With Secondary Licenses - If You choose to distribute Source Code Form that is Incompatible With - Secondary Licenses under the terms of this version of the License, the - notice described in Exhibit B of this License must be attached. - -Exhibit A - Source Code Form License Notice - - This Source Code Form is subject to the - terms of the Mozilla Public License, v. - 2.0. If a copy of the MPL was not - distributed with this file, You can - obtain one at - http://mozilla.org/MPL/2.0/. - -If it is not possible or desirable to put the notice in a particular file, then -You may include the notice in a location (such as a LICENSE file in a relevant -directory) where a recipient would be likely to look for such a notice. 
- -You may add additional accurate notices of copyright ownership. - -Exhibit B - “Incompatible With Secondary Licenses” Notice - - This Source Code Form is “Incompatible - With Secondary Licenses”, as defined by - the Mozilla Public License, v. 2.0. - diff --git a/vendor/github.com/hashicorp/hcl/Makefile b/vendor/github.com/hashicorp/hcl/Makefile deleted file mode 100644 index 84fd743f5c..0000000000 --- a/vendor/github.com/hashicorp/hcl/Makefile +++ /dev/null @@ -1,18 +0,0 @@ -TEST?=./... - -default: test - -fmt: generate - go fmt ./... - -test: generate - go get -t ./... - go test $(TEST) $(TESTARGS) - -generate: - go generate ./... - -updatedeps: - go get -u golang.org/x/tools/cmd/stringer - -.PHONY: default generate test updatedeps diff --git a/vendor/github.com/hashicorp/hcl/README.md b/vendor/github.com/hashicorp/hcl/README.md deleted file mode 100644 index c8223326dd..0000000000 --- a/vendor/github.com/hashicorp/hcl/README.md +++ /dev/null @@ -1,125 +0,0 @@ -# HCL - -[![GoDoc](https://godoc.org/github.com/hashicorp/hcl?status.png)](https://godoc.org/github.com/hashicorp/hcl) [![Build Status](https://travis-ci.org/hashicorp/hcl.svg?branch=master)](https://travis-ci.org/hashicorp/hcl) - -HCL (HashiCorp Configuration Language) is a configuration language built -by HashiCorp. The goal of HCL is to build a structured configuration language -that is both human and machine friendly for use with command-line tools, but -specifically targeted towards DevOps tools, servers, etc. - -HCL is also fully JSON compatible. That is, JSON can be used as completely -valid input to a system expecting HCL. This helps makes systems -interoperable with other systems. - -HCL is heavily inspired by -[libucl](https://github.com/vstakhov/libucl), -nginx configuration, and others similar. - -## Why? - -A common question when viewing HCL is to ask the question: why not -JSON, YAML, etc.? - -Prior to HCL, the tools we built at [HashiCorp](http://www.hashicorp.com) -used a variety of configuration languages from full programming languages -such as Ruby to complete data structure languages such as JSON. What we -learned is that some people wanted human-friendly configuration languages -and some people wanted machine-friendly languages. - -JSON fits a nice balance in this, but is fairly verbose and most -importantly doesn't support comments. With YAML, we found that beginners -had a really hard time determining what the actual structure was, and -ended up guessing more often than not whether to use a hyphen, colon, etc. -in order to represent some configuration key. - -Full programming languages such as Ruby enable complex behavior -a configuration language shouldn't usually allow, and also forces -people to learn some set of Ruby. - -Because of this, we decided to create our own configuration language -that is JSON-compatible. Our configuration language (HCL) is designed -to be written and modified by humans. The API for HCL allows JSON -as an input so that it is also machine-friendly (machines can generate -JSON instead of trying to generate HCL). - -Our goal with HCL is not to alienate other configuration languages. -It is instead to provide HCL as a specialized language for our tools, -and JSON as the interoperability layer. - -## Syntax - -For a complete grammar, please see the parser itself. A high-level overview -of the syntax and grammar is listed here. - - * Single line comments start with `#` or `//` - - * Multi-line comments are wrapped in `/*` and `*/`. 
Nested block comments - are not allowed. A multi-line comment (also known as a block comment) - terminates at the first `*/` found. - - * Values are assigned with the syntax `key = value` (whitespace doesn't - matter). The value can be any primitive: a string, number, boolean, - object, or list. - - * Strings are double-quoted and can contain any UTF-8 characters. - Example: `"Hello, World"` - - * Multi-line strings start with `< 1 { - itemVal = n.Filter(keyStr) - done[keyStr] = struct{}{} - } - - // Make the field name - fieldName := fmt.Sprintf("%s.%s", name, keyStr) - - // Get the key/value as reflection values - key := reflect.ValueOf(keyStr) - val := reflect.Indirect(reflect.New(resultElemType)) - - // If we have a pre-existing value in the map, use that - oldVal := resultMap.MapIndex(key) - if oldVal.IsValid() { - val.Set(oldVal) - } - - // Decode! - if err := d.decode(fieldName, itemVal, val); err != nil { - return err - } - - // Set the value on the map - resultMap.SetMapIndex(key, val) - } - - // Set the final map if we can - set.Set(resultMap) - return nil -} - -func (d *decoder) decodePtr(name string, node ast.Node, result reflect.Value) error { - // Create an element of the concrete (non pointer) type and decode - // into that. Then set the value of the pointer to this type. - resultType := result.Type() - resultElemType := resultType.Elem() - val := reflect.New(resultElemType) - if err := d.decode(name, node, reflect.Indirect(val)); err != nil { - return err - } - - result.Set(val) - return nil -} - -func (d *decoder) decodeSlice(name string, node ast.Node, result reflect.Value) error { - // If we have an interface, then we can address the interface, - // but not the slice itself, so get the element but set the interface - set := result - if result.Kind() == reflect.Interface { - result = result.Elem() - } - // Create the slice if it isn't nil - resultType := result.Type() - resultElemType := resultType.Elem() - if result.IsNil() { - resultSliceType := reflect.SliceOf(resultElemType) - result = reflect.MakeSlice( - resultSliceType, 0, 0) - } - - // Figure out the items we'll be copying into the slice - var items []ast.Node - switch n := node.(type) { - case *ast.ObjectList: - items = make([]ast.Node, len(n.Items)) - for i, item := range n.Items { - items[i] = item - } - case *ast.ObjectType: - items = []ast.Node{n} - case *ast.ListType: - items = n.List - default: - return &parser.PosError{ - Pos: node.Pos(), - Err: fmt.Errorf("unknown slice type: %T", node), - } - } - - for i, item := range items { - fieldName := fmt.Sprintf("%s[%d]", name, i) - - // Decode - val := reflect.Indirect(reflect.New(resultElemType)) - - // if item is an object that was decoded from ambiguous JSON and - // flattened, make sure it's expanded if it needs to decode into a - // defined structure. - item := expandObject(item, val) - - if err := d.decode(fieldName, item, val); err != nil { - return err - } - - // Append it onto the slice - result = reflect.Append(result, val) - } - - set.Set(result) - return nil -} - -// expandObject detects if an ambiguous JSON object was flattened to a List which -// should be decoded into a struct, and expands the ast to properly deocode. 
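
For orientation, the reflection-based decoder deleted in this hunk is easiest to read from the caller's side. The following is a minimal illustrative sketch (not taken from this patch) of how the vendored hashicorp/hcl package is conventionally driven, assuming the package-level `hcl.Decode` entry point and the `hcl` struct tag used by this decoder; the `Config` type and the sample input are invented for the example.

package main

import (
	"fmt"
	"log"

	"github.com/hashicorp/hcl"
)

// Config mirrors the kind of target struct the decoder fills in; the
// field names and `hcl` tags here are invented for illustration.
type Config struct {
	Name   string            `hcl:"name"`
	Count  int               `hcl:"count"`
	Labels map[string]string `hcl:"labels"`
}

func main() {
	src := `
name  = "example"
count = 3
labels {
  team = "infra"
}
`
	var cfg Config
	// Decode parses the HCL source and uses the reflection-based decoder
	// (decodeStruct, decodeMap, ...) to populate cfg.
	if err := hcl.Decode(&cfg, src); err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%+v\n", cfg)
}
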
-func expandObject(node ast.Node, result reflect.Value) ast.Node { - item, ok := node.(*ast.ObjectItem) - if !ok { - return node - } - - elemType := result.Type() - - // our target type must be a struct - switch elemType.Kind() { - case reflect.Ptr: - switch elemType.Elem().Kind() { - case reflect.Struct: - //OK - default: - return node - } - case reflect.Struct: - //OK - default: - return node - } - - // A list value will have a key and field name. If it had more fields, - // it wouldn't have been flattened. - if len(item.Keys) != 2 { - return node - } - - keyToken := item.Keys[0].Token - item.Keys = item.Keys[1:] - - // we need to un-flatten the ast enough to decode - newNode := &ast.ObjectItem{ - Keys: []*ast.ObjectKey{ - &ast.ObjectKey{ - Token: keyToken, - }, - }, - Val: &ast.ObjectType{ - List: &ast.ObjectList{ - Items: []*ast.ObjectItem{item}, - }, - }, - } - - return newNode -} - -func (d *decoder) decodeString(name string, node ast.Node, result reflect.Value) error { - switch n := node.(type) { - case *ast.LiteralType: - switch n.Token.Type { - case token.NUMBER: - result.Set(reflect.ValueOf(n.Token.Text).Convert(result.Type())) - return nil - case token.STRING, token.HEREDOC: - result.Set(reflect.ValueOf(n.Token.Value()).Convert(result.Type())) - return nil - } - } - - return &parser.PosError{ - Pos: node.Pos(), - Err: fmt.Errorf("%s: unknown type for string %T", name, node), - } -} - -func (d *decoder) decodeStruct(name string, node ast.Node, result reflect.Value) error { - var item *ast.ObjectItem - if it, ok := node.(*ast.ObjectItem); ok { - item = it - node = it.Val - } - - if ot, ok := node.(*ast.ObjectType); ok { - node = ot.List - } - - // Handle the special case where the object itself is a literal. Previously - // the yacc parser would always ensure top-level elements were arrays. The new - // parser does not make the same guarantees, thus we need to convert any - // top-level literal elements into a list. - if _, ok := node.(*ast.LiteralType); ok && item != nil { - node = &ast.ObjectList{Items: []*ast.ObjectItem{item}} - } - - list, ok := node.(*ast.ObjectList) - if !ok { - return &parser.PosError{ - Pos: node.Pos(), - Err: fmt.Errorf("%s: not an object type for struct (%T)", name, node), - } - } - - // This slice will keep track of all the structs we'll be decoding. - // There can be more than one struct if there are embedded structs - // that are squashed. - structs := make([]reflect.Value, 1, 5) - structs[0] = result - - // Compile the list of all the fields that we're going to be decoding - // from all the structs. - fields := make(map[*reflect.StructField]reflect.Value) - for len(structs) > 0 { - structVal := structs[0] - structs = structs[1:] - - structType := structVal.Type() - for i := 0; i < structType.NumField(); i++ { - fieldType := structType.Field(i) - tagParts := strings.Split(fieldType.Tag.Get(tagName), ",") - - // Ignore fields with tag name "-" - if tagParts[0] == "-" { - continue - } - - if fieldType.Anonymous { - fieldKind := fieldType.Type.Kind() - if fieldKind != reflect.Struct { - return &parser.PosError{ - Pos: node.Pos(), - Err: fmt.Errorf("%s: unsupported type to struct: %s", - fieldType.Name, fieldKind), - } - } - - // We have an embedded field. We "squash" the fields down - // if specified in the tag. 
- squash := false - for _, tag := range tagParts[1:] { - if tag == "squash" { - squash = true - break - } - } - - if squash { - structs = append( - structs, result.FieldByName(fieldType.Name)) - continue - } - } - - // Normal struct field, store it away - fields[&fieldType] = structVal.Field(i) - } - } - - usedKeys := make(map[string]struct{}) - decodedFields := make([]string, 0, len(fields)) - decodedFieldsVal := make([]reflect.Value, 0) - unusedKeysVal := make([]reflect.Value, 0) - for fieldType, field := range fields { - if !field.IsValid() { - // This should never happen - panic("field is not valid") - } - - // If we can't set the field, then it is unexported or something, - // and we just continue onwards. - if !field.CanSet() { - continue - } - - fieldName := fieldType.Name - - tagValue := fieldType.Tag.Get(tagName) - tagParts := strings.SplitN(tagValue, ",", 2) - if len(tagParts) >= 2 { - switch tagParts[1] { - case "decodedFields": - decodedFieldsVal = append(decodedFieldsVal, field) - continue - case "key": - if item == nil { - return &parser.PosError{ - Pos: node.Pos(), - Err: fmt.Errorf("%s: %s asked for 'key', impossible", - name, fieldName), - } - } - - field.SetString(item.Keys[0].Token.Value().(string)) - continue - case "unusedKeys": - unusedKeysVal = append(unusedKeysVal, field) - continue - } - } - - if tagParts[0] != "" { - fieldName = tagParts[0] - } - - // Determine the element we'll use to decode. If it is a single - // match (only object with the field), then we decode it exactly. - // If it is a prefix match, then we decode the matches. - filter := list.Filter(fieldName) - - prefixMatches := filter.Children() - matches := filter.Elem() - if len(matches.Items) == 0 && len(prefixMatches.Items) == 0 { - continue - } - - // Track the used key - usedKeys[fieldName] = struct{}{} - - // Create the field name and decode. We range over the elements - // because we actually want the value. - fieldName = fmt.Sprintf("%s.%s", name, fieldName) - if len(prefixMatches.Items) > 0 { - if err := d.decode(fieldName, prefixMatches, field); err != nil { - return err - } - } - for _, match := range matches.Items { - var decodeNode ast.Node = match.Val - if ot, ok := decodeNode.(*ast.ObjectType); ok { - decodeNode = &ast.ObjectList{Items: ot.List.Items} - } - - if err := d.decode(fieldName, decodeNode, field); err != nil { - return err - } - } - - decodedFields = append(decodedFields, fieldType.Name) - } - - if len(decodedFieldsVal) > 0 { - // Sort it so that it is deterministic - sort.Strings(decodedFields) - - for _, v := range decodedFieldsVal { - v.Set(reflect.ValueOf(decodedFields)) - } - } - - return nil -} - -// findNodeType returns the type of ast.Node -func findNodeType() reflect.Type { - var nodeContainer struct { - Node ast.Node - } - value := reflect.ValueOf(nodeContainer).FieldByName("Node") - return value.Type() -} diff --git a/vendor/github.com/hashicorp/hcl/hcl.go b/vendor/github.com/hashicorp/hcl/hcl.go deleted file mode 100644 index 575a20b50b..0000000000 --- a/vendor/github.com/hashicorp/hcl/hcl.go +++ /dev/null @@ -1,11 +0,0 @@ -// Package hcl decodes HCL into usable Go structures. -// -// hcl input can come in either pure HCL format or JSON format. -// It can be parsed into an AST, and then decoded into a structure, -// or it can be decoded directly from a string into a structure. -// -// If you choose to parse HCL into a raw AST, the benefit is that you -// can write custom visitor implementations to implement custom -// semantic checks. 
By default, HCL does not perform any semantic -// checks. -package hcl diff --git a/vendor/github.com/hashicorp/hcl/hcl/ast/ast.go b/vendor/github.com/hashicorp/hcl/hcl/ast/ast.go deleted file mode 100644 index 6e5ef654bb..0000000000 --- a/vendor/github.com/hashicorp/hcl/hcl/ast/ast.go +++ /dev/null @@ -1,219 +0,0 @@ -// Package ast declares the types used to represent syntax trees for HCL -// (HashiCorp Configuration Language) -package ast - -import ( - "fmt" - "strings" - - "github.com/hashicorp/hcl/hcl/token" -) - -// Node is an element in the abstract syntax tree. -type Node interface { - node() - Pos() token.Pos -} - -func (File) node() {} -func (ObjectList) node() {} -func (ObjectKey) node() {} -func (ObjectItem) node() {} -func (Comment) node() {} -func (CommentGroup) node() {} -func (ObjectType) node() {} -func (LiteralType) node() {} -func (ListType) node() {} - -// File represents a single HCL file -type File struct { - Node Node // usually a *ObjectList - Comments []*CommentGroup // list of all comments in the source -} - -func (f *File) Pos() token.Pos { - return f.Node.Pos() -} - -// ObjectList represents a list of ObjectItems. An HCL file itself is an -// ObjectList. -type ObjectList struct { - Items []*ObjectItem -} - -func (o *ObjectList) Add(item *ObjectItem) { - o.Items = append(o.Items, item) -} - -// Filter filters out the objects with the given key list as a prefix. -// -// The returned list of objects contain ObjectItems where the keys have -// this prefix already stripped off. This might result in objects with -// zero-length key lists if they have no children. -// -// If no matches are found, an empty ObjectList (non-nil) is returned. -func (o *ObjectList) Filter(keys ...string) *ObjectList { - var result ObjectList - for _, item := range o.Items { - // If there aren't enough keys, then ignore this - if len(item.Keys) < len(keys) { - continue - } - - match := true - for i, key := range item.Keys[:len(keys)] { - key := key.Token.Value().(string) - if key != keys[i] && !strings.EqualFold(key, keys[i]) { - match = false - break - } - } - if !match { - continue - } - - // Strip off the prefix from the children - newItem := *item - newItem.Keys = newItem.Keys[len(keys):] - result.Add(&newItem) - } - - return &result -} - -// Children returns further nested objects (key length > 0) within this -// ObjectList. This should be used with Filter to get at child items. -func (o *ObjectList) Children() *ObjectList { - var result ObjectList - for _, item := range o.Items { - if len(item.Keys) > 0 { - result.Add(item) - } - } - - return &result -} - -// Elem returns items in the list that are direct element assignments -// (key length == 0). This should be used with Filter to get at elements. -func (o *ObjectList) Elem() *ObjectList { - var result ObjectList - for _, item := range o.Items { - if len(item.Keys) == 0 { - result.Add(item) - } - } - - return &result -} - -func (o *ObjectList) Pos() token.Pos { - // always returns the uninitiliazed position - return o.Items[0].Pos() -} - -// ObjectItem represents a HCL Object Item. An item is represented with a key -// (or keys). It can be an assignment or an object (both normal and nested) -type ObjectItem struct { - // keys is only one length long if it's of type assignment. If it's a - // nested object it can be larger than one. In that case "assign" is - // invalid as there is no assignments for a nested object. 
- Keys []*ObjectKey - - // assign contains the position of "=", if any - Assign token.Pos - - // val is the item itself. It can be an object,list, number, bool or a - // string. If key length is larger than one, val can be only of type - // Object. - Val Node - - LeadComment *CommentGroup // associated lead comment - LineComment *CommentGroup // associated line comment -} - -func (o *ObjectItem) Pos() token.Pos { - // I'm not entirely sure what causes this, but removing this causes - // a test failure. We should investigate at some point. - if len(o.Keys) == 0 { - return token.Pos{} - } - - return o.Keys[0].Pos() -} - -// ObjectKeys are either an identifier or of type string. -type ObjectKey struct { - Token token.Token -} - -func (o *ObjectKey) Pos() token.Pos { - return o.Token.Pos -} - -// LiteralType represents a literal of basic type. Valid types are: -// token.NUMBER, token.FLOAT, token.BOOL and token.STRING -type LiteralType struct { - Token token.Token - - // comment types, only used when in a list - LeadComment *CommentGroup - LineComment *CommentGroup -} - -func (l *LiteralType) Pos() token.Pos { - return l.Token.Pos -} - -// ListStatement represents a HCL List type -type ListType struct { - Lbrack token.Pos // position of "[" - Rbrack token.Pos // position of "]" - List []Node // the elements in lexical order -} - -func (l *ListType) Pos() token.Pos { - return l.Lbrack -} - -func (l *ListType) Add(node Node) { - l.List = append(l.List, node) -} - -// ObjectType represents a HCL Object Type -type ObjectType struct { - Lbrace token.Pos // position of "{" - Rbrace token.Pos // position of "}" - List *ObjectList // the nodes in lexical order -} - -func (o *ObjectType) Pos() token.Pos { - return o.Lbrace -} - -// Comment node represents a single //, # style or /*- style commment -type Comment struct { - Start token.Pos // position of / or # - Text string -} - -func (c *Comment) Pos() token.Pos { - return c.Start -} - -// CommentGroup node represents a sequence of comments with no other tokens and -// no empty lines between. -type CommentGroup struct { - List []*Comment // len(List) > 0 -} - -func (c *CommentGroup) Pos() token.Pos { - return c.List[0].Pos() -} - -//------------------------------------------------------------------- -// GoStringer -//------------------------------------------------------------------- - -func (o *ObjectKey) GoString() string { return fmt.Sprintf("*%#v", *o) } -func (o *ObjectList) GoString() string { return fmt.Sprintf("*%#v", *o) } diff --git a/vendor/github.com/hashicorp/hcl/hcl/ast/walk.go b/vendor/github.com/hashicorp/hcl/hcl/ast/walk.go deleted file mode 100644 index ba07ad42b0..0000000000 --- a/vendor/github.com/hashicorp/hcl/hcl/ast/walk.go +++ /dev/null @@ -1,52 +0,0 @@ -package ast - -import "fmt" - -// WalkFunc describes a function to be called for each node during a Walk. The -// returned node can be used to rewrite the AST. Walking stops the returned -// bool is false. -type WalkFunc func(Node) (Node, bool) - -// Walk traverses an AST in depth-first order: It starts by calling fn(node); -// node must not be nil. If fn returns true, Walk invokes fn recursively for -// each of the non-nil children of node, followed by a call of fn(nil). The -// returned node of fn can be used to rewrite the passed node to fn. 
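
The Walk helper described in the comment above is the generic entry point for custom AST passes. Below is a brief sketch of how it is typically combined with the parser package that this patch also removes, using only the signatures visible in the deleted sources; the sample input and the key-printing callback are illustrative assumptions.

package main

import (
	"fmt"
	"log"

	"github.com/hashicorp/hcl/hcl/ast"
	"github.com/hashicorp/hcl/hcl/parser"
)

func main() {
	src := []byte(`region  = "eu-west-1"
retries = 3`)

	// parser.Parse returns an *ast.File whose Node is an *ast.ObjectList.
	file, err := parser.Parse(src)
	if err != nil {
		log.Fatal(err)
	}

	// Walk visits every node depth-first; returning true continues the
	// traversal, and the returned node can rewrite the tree in place.
	ast.Walk(file.Node, func(n ast.Node) (ast.Node, bool) {
		if key, ok := n.(*ast.ObjectKey); ok {
			fmt.Println("key:", key.Token.Text)
		}
		return n, true
	})
}
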
-func Walk(node Node, fn WalkFunc) Node { - rewritten, ok := fn(node) - if !ok { - return rewritten - } - - switch n := node.(type) { - case *File: - n.Node = Walk(n.Node, fn) - case *ObjectList: - for i, item := range n.Items { - n.Items[i] = Walk(item, fn).(*ObjectItem) - } - case *ObjectKey: - // nothing to do - case *ObjectItem: - for i, k := range n.Keys { - n.Keys[i] = Walk(k, fn).(*ObjectKey) - } - - if n.Val != nil { - n.Val = Walk(n.Val, fn) - } - case *LiteralType: - // nothing to do - case *ListType: - for i, l := range n.List { - n.List[i] = Walk(l, fn) - } - case *ObjectType: - n.List = Walk(n.List, fn).(*ObjectList) - default: - // should we panic here? - fmt.Printf("unknown type: %T\n", n) - } - - fn(nil) - return rewritten -} diff --git a/vendor/github.com/hashicorp/hcl/hcl/parser/error.go b/vendor/github.com/hashicorp/hcl/hcl/parser/error.go deleted file mode 100644 index 5c99381dfb..0000000000 --- a/vendor/github.com/hashicorp/hcl/hcl/parser/error.go +++ /dev/null @@ -1,17 +0,0 @@ -package parser - -import ( - "fmt" - - "github.com/hashicorp/hcl/hcl/token" -) - -// PosError is a parse error that contains a position. -type PosError struct { - Pos token.Pos - Err error -} - -func (e *PosError) Error() string { - return fmt.Sprintf("At %s: %s", e.Pos, e.Err) -} diff --git a/vendor/github.com/hashicorp/hcl/hcl/parser/parser.go b/vendor/github.com/hashicorp/hcl/hcl/parser/parser.go deleted file mode 100644 index 6e54bed974..0000000000 --- a/vendor/github.com/hashicorp/hcl/hcl/parser/parser.go +++ /dev/null @@ -1,514 +0,0 @@ -// Package parser implements a parser for HCL (HashiCorp Configuration -// Language) -package parser - -import ( - "errors" - "fmt" - "strings" - - "github.com/hashicorp/hcl/hcl/ast" - "github.com/hashicorp/hcl/hcl/scanner" - "github.com/hashicorp/hcl/hcl/token" -) - -type Parser struct { - sc *scanner.Scanner - - // Last read token - tok token.Token - commaPrev token.Token - - comments []*ast.CommentGroup - leadComment *ast.CommentGroup // last lead comment - lineComment *ast.CommentGroup // last line comment - - enableTrace bool - indent int - n int // buffer size (max = 1) -} - -func newParser(src []byte) *Parser { - return &Parser{ - sc: scanner.New(src), - } -} - -// Parse returns the fully parsed source and returns the abstract syntax tree. -func Parse(src []byte) (*ast.File, error) { - p := newParser(src) - return p.Parse() -} - -var errEofToken = errors.New("EOF token found") - -// Parse returns the fully parsed source and returns the abstract syntax tree. -func (p *Parser) Parse() (*ast.File, error) { - f := &ast.File{} - var err, scerr error - p.sc.Error = func(pos token.Pos, msg string) { - scerr = &PosError{Pos: pos, Err: errors.New(msg)} - } - - f.Node, err = p.objectList(false) - if scerr != nil { - return nil, scerr - } - if err != nil { - return nil, err - } - - f.Comments = p.comments - return f, nil -} - -// objectList parses a list of items within an object (generally k/v pairs). -// The parameter" obj" tells this whether to we are within an object (braces: -// '{', '}') or just at the top level. If we're within an object, we end -// at an RBRACE. 
-func (p *Parser) objectList(obj bool) (*ast.ObjectList, error) { - defer un(trace(p, "ParseObjectList")) - node := &ast.ObjectList{} - - for { - if obj { - tok := p.scan() - p.unscan() - if tok.Type == token.RBRACE { - break - } - } - - n, err := p.objectItem() - if err == errEofToken { - break // we are finished - } - - // we don't return a nil node, because might want to use already - // collected items. - if err != nil { - return node, err - } - - node.Add(n) - - // object lists can be optionally comma-delimited e.g. when a list of maps - // is being expressed, so a comma is allowed here - it's simply consumed - tok := p.scan() - if tok.Type != token.COMMA { - p.unscan() - } - } - return node, nil -} - -func (p *Parser) consumeComment() (comment *ast.Comment, endline int) { - endline = p.tok.Pos.Line - - // count the endline if it's multiline comment, ie starting with /* - if len(p.tok.Text) > 1 && p.tok.Text[1] == '*' { - // don't use range here - no need to decode Unicode code points - for i := 0; i < len(p.tok.Text); i++ { - if p.tok.Text[i] == '\n' { - endline++ - } - } - } - - comment = &ast.Comment{Start: p.tok.Pos, Text: p.tok.Text} - p.tok = p.sc.Scan() - return -} - -func (p *Parser) consumeCommentGroup(n int) (comments *ast.CommentGroup, endline int) { - var list []*ast.Comment - endline = p.tok.Pos.Line - - for p.tok.Type == token.COMMENT && p.tok.Pos.Line <= endline+n { - var comment *ast.Comment - comment, endline = p.consumeComment() - list = append(list, comment) - } - - // add comment group to the comments list - comments = &ast.CommentGroup{List: list} - p.comments = append(p.comments, comments) - - return -} - -// objectItem parses a single object item -func (p *Parser) objectItem() (*ast.ObjectItem, error) { - defer un(trace(p, "ParseObjectItem")) - - keys, err := p.objectKey() - if len(keys) > 0 && err == errEofToken { - // We ignore eof token here since it is an error if we didn't - // receive a value (but we did receive a key) for the item. - err = nil - } - if len(keys) > 0 && err != nil && p.tok.Type == token.RBRACE { - // This is a strange boolean statement, but what it means is: - // We have keys with no value, and we're likely in an object - // (since RBrace ends an object). For this, we set err to nil so - // we continue and get the error below of having the wrong value - // type. - err = nil - - // Reset the token type so we don't think it completed fine. See - // objectType which uses p.tok.Type to check if we're done with - // the object. 
- p.tok.Type = token.EOF - } - if err != nil { - return nil, err - } - - o := &ast.ObjectItem{ - Keys: keys, - } - - if p.leadComment != nil { - o.LeadComment = p.leadComment - p.leadComment = nil - } - - switch p.tok.Type { - case token.ASSIGN: - o.Assign = p.tok.Pos - o.Val, err = p.object() - if err != nil { - return nil, err - } - case token.LBRACE: - o.Val, err = p.objectType() - if err != nil { - return nil, err - } - default: - keyStr := make([]string, 0, len(keys)) - for _, k := range keys { - keyStr = append(keyStr, k.Token.Text) - } - - return nil, fmt.Errorf( - "key '%s' expected start of object ('{') or assignment ('=')", - strings.Join(keyStr, " ")) - } - - // do a look-ahead for line comment - p.scan() - if len(keys) > 0 && o.Val.Pos().Line == keys[0].Pos().Line && p.lineComment != nil { - o.LineComment = p.lineComment - p.lineComment = nil - } - p.unscan() - return o, nil -} - -// objectKey parses an object key and returns a ObjectKey AST -func (p *Parser) objectKey() ([]*ast.ObjectKey, error) { - keyCount := 0 - keys := make([]*ast.ObjectKey, 0) - - for { - tok := p.scan() - switch tok.Type { - case token.EOF: - // It is very important to also return the keys here as well as - // the error. This is because we need to be able to tell if we - // did parse keys prior to finding the EOF, or if we just found - // a bare EOF. - return keys, errEofToken - case token.ASSIGN: - // assignment or object only, but not nested objects. this is not - // allowed: `foo bar = {}` - if keyCount > 1 { - return nil, &PosError{ - Pos: p.tok.Pos, - Err: fmt.Errorf("nested object expected: LBRACE got: %s", p.tok.Type), - } - } - - if keyCount == 0 { - return nil, &PosError{ - Pos: p.tok.Pos, - Err: errors.New("no object keys found!"), - } - } - - return keys, nil - case token.LBRACE: - var err error - - // If we have no keys, then it is a syntax error. i.e. {{}} is not - // allowed. - if len(keys) == 0 { - err = &PosError{ - Pos: p.tok.Pos, - Err: fmt.Errorf("expected: IDENT | STRING got: %s", p.tok.Type), - } - } - - // object - return keys, err - case token.IDENT, token.STRING: - keyCount++ - keys = append(keys, &ast.ObjectKey{Token: p.tok}) - case token.ILLEGAL: - return keys, &PosError{ - Pos: p.tok.Pos, - Err: fmt.Errorf("illegal character"), - } - default: - return keys, &PosError{ - Pos: p.tok.Pos, - Err: fmt.Errorf("expected: IDENT | STRING | ASSIGN | LBRACE got: %s", p.tok.Type), - } - } - } -} - -// object parses any type of object, such as number, bool, string, object or -// list. -func (p *Parser) object() (ast.Node, error) { - defer un(trace(p, "ParseType")) - tok := p.scan() - - switch tok.Type { - case token.NUMBER, token.FLOAT, token.BOOL, token.STRING, token.HEREDOC: - return p.literalType() - case token.LBRACE: - return p.objectType() - case token.LBRACK: - return p.listType() - case token.COMMENT: - // implement comment - case token.EOF: - return nil, errEofToken - } - - return nil, &PosError{ - Pos: tok.Pos, - Err: fmt.Errorf("Unknown token: %+v", tok), - } -} - -// objectType parses an object type and returns a ObjectType AST -func (p *Parser) objectType() (*ast.ObjectType, error) { - defer un(trace(p, "ParseObjectType")) - - // we assume that the currently scanned token is a LBRACE - o := &ast.ObjectType{ - Lbrace: p.tok.Pos, - } - - l, err := p.objectList(true) - - // if we hit RBRACE, we are good to go (means we parsed all Items), if it's - // not a RBRACE, it's an syntax error and we just return it. 
- if err != nil && p.tok.Type != token.RBRACE { - return nil, err - } - - // No error, scan and expect the ending to be a brace - if tok := p.scan(); tok.Type != token.RBRACE { - return nil, fmt.Errorf("object expected closing RBRACE got: %s", tok.Type) - } - - o.List = l - o.Rbrace = p.tok.Pos // advanced via parseObjectList - return o, nil -} - -// listType parses a list type and returns a ListType AST -func (p *Parser) listType() (*ast.ListType, error) { - defer un(trace(p, "ParseListType")) - - // we assume that the currently scanned token is a LBRACK - l := &ast.ListType{ - Lbrack: p.tok.Pos, - } - - needComma := false - for { - tok := p.scan() - if needComma { - switch tok.Type { - case token.COMMA, token.RBRACK: - default: - return nil, &PosError{ - Pos: tok.Pos, - Err: fmt.Errorf( - "error parsing list, expected comma or list end, got: %s", - tok.Type), - } - } - } - switch tok.Type { - case token.BOOL, token.NUMBER, token.FLOAT, token.STRING, token.HEREDOC: - node, err := p.literalType() - if err != nil { - return nil, err - } - - // If there is a lead comment, apply it - if p.leadComment != nil { - node.LeadComment = p.leadComment - p.leadComment = nil - } - - l.Add(node) - needComma = true - case token.COMMA: - // get next list item or we are at the end - // do a look-ahead for line comment - p.scan() - if p.lineComment != nil && len(l.List) > 0 { - lit, ok := l.List[len(l.List)-1].(*ast.LiteralType) - if ok { - lit.LineComment = p.lineComment - l.List[len(l.List)-1] = lit - p.lineComment = nil - } - } - p.unscan() - - needComma = false - continue - case token.LBRACE: - // Looks like a nested object, so parse it out - node, err := p.objectType() - if err != nil { - return nil, &PosError{ - Pos: tok.Pos, - Err: fmt.Errorf( - "error while trying to parse object within list: %s", err), - } - } - l.Add(node) - needComma = true - case token.LBRACK: - node, err := p.listType() - if err != nil { - return nil, &PosError{ - Pos: tok.Pos, - Err: fmt.Errorf( - "error while trying to parse list within list: %s", err), - } - } - l.Add(node) - case token.RBRACK: - // finished - l.Rbrack = p.tok.Pos - return l, nil - default: - return nil, &PosError{ - Pos: tok.Pos, - Err: fmt.Errorf("unexpected token while parsing list: %s", tok.Type), - } - } - } -} - -// literalType parses a literal type and returns a LiteralType AST -func (p *Parser) literalType() (*ast.LiteralType, error) { - defer un(trace(p, "ParseLiteral")) - - return &ast.LiteralType{ - Token: p.tok, - }, nil -} - -// scan returns the next token from the underlying scanner. If a token has -// been unscanned then read that instead. In the process, it collects any -// comment groups encountered, and remembers the last lead and line comments. -func (p *Parser) scan() token.Token { - // If we have a token on the buffer, then return it. - if p.n != 0 { - p.n = 0 - return p.tok - } - - // Otherwise read the next token from the scanner and Save it to the buffer - // in case we unscan later. - prev := p.tok - p.tok = p.sc.Scan() - - if p.tok.Type == token.COMMENT { - var comment *ast.CommentGroup - var endline int - - // fmt.Printf("p.tok.Pos.Line = %+v prev: %d endline %d \n", - // p.tok.Pos.Line, prev.Pos.Line, endline) - if p.tok.Pos.Line == prev.Pos.Line { - // The comment is on same line as the previous token; it - // cannot be a lead comment but may be a line comment. 
- comment, endline = p.consumeCommentGroup(0) - if p.tok.Pos.Line != endline { - // The next token is on a different line, thus - // the last comment group is a line comment. - p.lineComment = comment - } - } - - // consume successor comments, if any - endline = -1 - for p.tok.Type == token.COMMENT { - comment, endline = p.consumeCommentGroup(1) - } - - if endline+1 == p.tok.Pos.Line && p.tok.Type != token.RBRACE { - switch p.tok.Type { - case token.RBRACE, token.RBRACK: - // Do not count for these cases - default: - // The next token is following on the line immediately after the - // comment group, thus the last comment group is a lead comment. - p.leadComment = comment - } - } - - } - - return p.tok -} - -// unscan pushes the previously read token back onto the buffer. -func (p *Parser) unscan() { - p.n = 1 -} - -// ---------------------------------------------------------------------------- -// Parsing support - -func (p *Parser) printTrace(a ...interface{}) { - if !p.enableTrace { - return - } - - const dots = ". . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . " - const n = len(dots) - fmt.Printf("%5d:%3d: ", p.tok.Pos.Line, p.tok.Pos.Column) - - i := 2 * p.indent - for i > n { - fmt.Print(dots) - i -= n - } - // i <= n - fmt.Print(dots[0:i]) - fmt.Println(a...) -} - -func trace(p *Parser, msg string) *Parser { - p.printTrace(msg, "(") - p.indent++ - return p -} - -// Usage pattern: defer un(trace(p, "...")) -func un(p *Parser) { - p.indent-- - p.printTrace(")") -} diff --git a/vendor/github.com/hashicorp/hcl/hcl/scanner/scanner.go b/vendor/github.com/hashicorp/hcl/hcl/scanner/scanner.go deleted file mode 100644 index 69662367f0..0000000000 --- a/vendor/github.com/hashicorp/hcl/hcl/scanner/scanner.go +++ /dev/null @@ -1,651 +0,0 @@ -// Package scanner implements a scanner for HCL (HashiCorp Configuration -// Language) source text. -package scanner - -import ( - "bytes" - "fmt" - "os" - "regexp" - "unicode" - "unicode/utf8" - - "github.com/hashicorp/hcl/hcl/token" -) - -// eof represents a marker rune for the end of the reader. -const eof = rune(0) - -// Scanner defines a lexical scanner -type Scanner struct { - buf *bytes.Buffer // Source buffer for advancing and scanning - src []byte // Source buffer for immutable access - - // Source Position - srcPos token.Pos // current position - prevPos token.Pos // previous position, used for peek() method - - lastCharLen int // length of last character in bytes - lastLineLen int // length of last line in characters (for correct column reporting) - - tokStart int // token text start position - tokEnd int // token text end position - - // Error is called for each error encountered. If no Error - // function is set, the error is reported to os.Stderr. - Error func(pos token.Pos, msg string) - - // ErrorCount is incremented by one for each error encountered. - ErrorCount int - - // tokPos is the start position of most recently scanned token; set by - // Scan. The Filename field is always left untouched by the Scanner. If - // an error is reported (via Error) and Position is invalid, the scanner is - // not inside a token. - tokPos token.Pos -} - -// New creates and initializes a new instance of Scanner using src as -// its source content. -func New(src []byte) *Scanner { - // even though we accept a src, we read from a io.Reader compatible type - // (*bytes.Buffer). So in the future we might easily change it to streaming - // read. 
- b := bytes.NewBuffer(src) - s := &Scanner{ - buf: b, - src: src, - } - - // srcPosition always starts with 1 - s.srcPos.Line = 1 - return s -} - -// next reads the next rune from the bufferred reader. Returns the rune(0) if -// an error occurs (or io.EOF is returned). -func (s *Scanner) next() rune { - ch, size, err := s.buf.ReadRune() - if err != nil { - // advance for error reporting - s.srcPos.Column++ - s.srcPos.Offset += size - s.lastCharLen = size - return eof - } - - if ch == utf8.RuneError && size == 1 { - s.srcPos.Column++ - s.srcPos.Offset += size - s.lastCharLen = size - s.err("illegal UTF-8 encoding") - return ch - } - - // remember last position - s.prevPos = s.srcPos - - s.srcPos.Column++ - s.lastCharLen = size - s.srcPos.Offset += size - - if ch == '\n' { - s.srcPos.Line++ - s.lastLineLen = s.srcPos.Column - s.srcPos.Column = 0 - } - - // If we see a null character with data left, then that is an error - if ch == '\x00' && s.buf.Len() > 0 { - s.err("unexpected null character (0x00)") - return eof - } - - // debug - // fmt.Printf("ch: %q, offset:column: %d:%d\n", ch, s.srcPos.Offset, s.srcPos.Column) - return ch -} - -// unread unreads the previous read Rune and updates the source position -func (s *Scanner) unread() { - if err := s.buf.UnreadRune(); err != nil { - panic(err) // this is user fault, we should catch it - } - s.srcPos = s.prevPos // put back last position -} - -// peek returns the next rune without advancing the reader. -func (s *Scanner) peek() rune { - peek, _, err := s.buf.ReadRune() - if err != nil { - return eof - } - - s.buf.UnreadRune() - return peek -} - -// Scan scans the next token and returns the token. -func (s *Scanner) Scan() token.Token { - ch := s.next() - - // skip white space - for isWhitespace(ch) { - ch = s.next() - } - - var tok token.Type - - // token text markings - s.tokStart = s.srcPos.Offset - s.lastCharLen - - // token position, initial next() is moving the offset by one(size of rune - // actually), though we are interested with the starting point - s.tokPos.Offset = s.srcPos.Offset - s.lastCharLen - if s.srcPos.Column > 0 { - // common case: last character was not a '\n' - s.tokPos.Line = s.srcPos.Line - s.tokPos.Column = s.srcPos.Column - } else { - // last character was a '\n' - // (we cannot be at the beginning of the source - // since we have called next() at least once) - s.tokPos.Line = s.srcPos.Line - 1 - s.tokPos.Column = s.lastLineLen - } - - switch { - case isLetter(ch): - tok = token.IDENT - lit := s.scanIdentifier() - if lit == "true" || lit == "false" { - tok = token.BOOL - } - case isDecimal(ch): - tok = s.scanNumber(ch) - default: - switch ch { - case eof: - tok = token.EOF - case '"': - tok = token.STRING - s.scanString() - case '#', '/': - tok = token.COMMENT - s.scanComment(ch) - case '.': - tok = token.PERIOD - ch = s.peek() - if isDecimal(ch) { - tok = token.FLOAT - ch = s.scanMantissa(ch) - ch = s.scanExponent(ch) - } - case '<': - tok = token.HEREDOC - s.scanHeredoc() - case '[': - tok = token.LBRACK - case ']': - tok = token.RBRACK - case '{': - tok = token.LBRACE - case '}': - tok = token.RBRACE - case ',': - tok = token.COMMA - case '=': - tok = token.ASSIGN - case '+': - tok = token.ADD - case '-': - if isDecimal(s.peek()) { - ch := s.next() - tok = s.scanNumber(ch) - } else { - tok = token.SUB - } - default: - s.err("illegal char") - } - } - - // finish token ending - s.tokEnd = s.srcPos.Offset - - // create token literal - var tokenText string - if s.tokStart >= 0 { - tokenText = 
string(s.src[s.tokStart:s.tokEnd]) - } - s.tokStart = s.tokEnd // ensure idempotency of tokenText() call - - return token.Token{ - Type: tok, - Pos: s.tokPos, - Text: tokenText, - } -} - -func (s *Scanner) scanComment(ch rune) { - // single line comments - if ch == '#' || (ch == '/' && s.peek() != '*') { - if ch == '/' && s.peek() != '/' { - s.err("expected '/' for comment") - return - } - - ch = s.next() - for ch != '\n' && ch >= 0 && ch != eof { - ch = s.next() - } - if ch != eof && ch >= 0 { - s.unread() - } - return - } - - // be sure we get the character after /* This allows us to find comment's - // that are not erminated - if ch == '/' { - s.next() - ch = s.next() // read character after "/*" - } - - // look for /* - style comments - for { - if ch < 0 || ch == eof { - s.err("comment not terminated") - break - } - - ch0 := ch - ch = s.next() - if ch0 == '*' && ch == '/' { - break - } - } -} - -// scanNumber scans a HCL number definition starting with the given rune -func (s *Scanner) scanNumber(ch rune) token.Type { - if ch == '0' { - // check for hexadecimal, octal or float - ch = s.next() - if ch == 'x' || ch == 'X' { - // hexadecimal - ch = s.next() - found := false - for isHexadecimal(ch) { - ch = s.next() - found = true - } - - if !found { - s.err("illegal hexadecimal number") - } - - if ch != eof { - s.unread() - } - - return token.NUMBER - } - - // now it's either something like: 0421(octal) or 0.1231(float) - illegalOctal := false - for isDecimal(ch) { - ch = s.next() - if ch == '8' || ch == '9' { - // this is just a possibility. For example 0159 is illegal, but - // 0159.23 is valid. So we mark a possible illegal octal. If - // the next character is not a period, we'll print the error. - illegalOctal = true - } - } - - if ch == 'e' || ch == 'E' { - ch = s.scanExponent(ch) - return token.FLOAT - } - - if ch == '.' { - ch = s.scanFraction(ch) - - if ch == 'e' || ch == 'E' { - ch = s.next() - ch = s.scanExponent(ch) - } - return token.FLOAT - } - - if illegalOctal { - s.err("illegal octal number") - } - - if ch != eof { - s.unread() - } - return token.NUMBER - } - - s.scanMantissa(ch) - ch = s.next() // seek forward - if ch == 'e' || ch == 'E' { - ch = s.scanExponent(ch) - return token.FLOAT - } - - if ch == '.' { - ch = s.scanFraction(ch) - if ch == 'e' || ch == 'E' { - ch = s.next() - ch = s.scanExponent(ch) - } - return token.FLOAT - } - - if ch != eof { - s.unread() - } - return token.NUMBER -} - -// scanMantissa scans the mantissa begining from the rune. It returns the next -// non decimal rune. It's used to determine wheter it's a fraction or exponent. -func (s *Scanner) scanMantissa(ch rune) rune { - scanned := false - for isDecimal(ch) { - ch = s.next() - scanned = true - } - - if scanned && ch != eof { - s.unread() - } - return ch -} - -// scanFraction scans the fraction after the '.' rune -func (s *Scanner) scanFraction(ch rune) rune { - if ch == '.' { - ch = s.peek() // we peek just to see if we can move forward - ch = s.scanMantissa(ch) - } - return ch -} - -// scanExponent scans the remaining parts of an exponent after the 'e' or 'E' -// rune. 
-func (s *Scanner) scanExponent(ch rune) rune { - if ch == 'e' || ch == 'E' { - ch = s.next() - if ch == '-' || ch == '+' { - ch = s.next() - } - ch = s.scanMantissa(ch) - } - return ch -} - -// scanHeredoc scans a heredoc string -func (s *Scanner) scanHeredoc() { - // Scan the second '<' in example: '<= len(identBytes) && identRegexp.Match(s.src[lineStart:s.srcPos.Offset-s.lastCharLen]) { - break - } - - // Not an anchor match, record the start of a new line - lineStart = s.srcPos.Offset - } - - if ch == eof { - s.err("heredoc not terminated") - return - } - } - - return -} - -// scanString scans a quoted string -func (s *Scanner) scanString() { - braces := 0 - for { - // '"' opening already consumed - // read character after quote - ch := s.next() - - if (ch == '\n' && braces == 0) || ch < 0 || ch == eof { - s.err("literal not terminated") - return - } - - if ch == '"' && braces == 0 { - break - } - - // If we're going into a ${} then we can ignore quotes for awhile - if braces == 0 && ch == '$' && s.peek() == '{' { - braces++ - s.next() - } else if braces > 0 && ch == '{' { - braces++ - } - if braces > 0 && ch == '}' { - braces-- - } - - if ch == '\\' { - s.scanEscape() - } - } - - return -} - -// scanEscape scans an escape sequence -func (s *Scanner) scanEscape() rune { - // http://en.cppreference.com/w/cpp/language/escape - ch := s.next() // read character after '/' - switch ch { - case 'a', 'b', 'f', 'n', 'r', 't', 'v', '\\', '"': - // nothing to do - case '0', '1', '2', '3', '4', '5', '6', '7': - // octal notation - ch = s.scanDigits(ch, 8, 3) - case 'x': - // hexademical notation - ch = s.scanDigits(s.next(), 16, 2) - case 'u': - // universal character name - ch = s.scanDigits(s.next(), 16, 4) - case 'U': - // universal character name - ch = s.scanDigits(s.next(), 16, 8) - default: - s.err("illegal char escape") - } - return ch -} - -// scanDigits scans a rune with the given base for n times. For example an -// octal notation \184 would yield in scanDigits(ch, 8, 3) -func (s *Scanner) scanDigits(ch rune, base, n int) rune { - start := n - for n > 0 && digitVal(ch) < base { - ch = s.next() - if ch == eof { - // If we see an EOF, we halt any more scanning of digits - // immediately. - break - } - - n-- - } - if n > 0 { - s.err("illegal char escape") - } - - if n != start { - // we scanned all digits, put the last non digit char back, - // only if we read anything at all - s.unread() - } - - return ch -} - -// scanIdentifier scans an identifier and returns the literal string -func (s *Scanner) scanIdentifier() string { - offs := s.srcPos.Offset - s.lastCharLen - ch := s.next() - for isLetter(ch) || isDigit(ch) || ch == '-' || ch == '.' { - ch = s.next() - } - - if ch != eof { - s.unread() // we got identifier, put back latest char - } - - return string(s.src[offs:s.srcPos.Offset]) -} - -// recentPosition returns the position of the character immediately after the -// character or token returned by the last call to Scan. 
-func (s *Scanner) recentPosition() (pos token.Pos) { - pos.Offset = s.srcPos.Offset - s.lastCharLen - switch { - case s.srcPos.Column > 0: - // common case: last character was not a '\n' - pos.Line = s.srcPos.Line - pos.Column = s.srcPos.Column - case s.lastLineLen > 0: - // last character was a '\n' - // (we cannot be at the beginning of the source - // since we have called next() at least once) - pos.Line = s.srcPos.Line - 1 - pos.Column = s.lastLineLen - default: - // at the beginning of the source - pos.Line = 1 - pos.Column = 1 - } - return -} - -// err prints the error of any scanning to s.Error function. If the function is -// not defined, by default it prints them to os.Stderr -func (s *Scanner) err(msg string) { - s.ErrorCount++ - pos := s.recentPosition() - - if s.Error != nil { - s.Error(pos, msg) - return - } - - fmt.Fprintf(os.Stderr, "%s: %s\n", pos, msg) -} - -// isHexadecimal returns true if the given rune is a letter -func isLetter(ch rune) bool { - return 'a' <= ch && ch <= 'z' || 'A' <= ch && ch <= 'Z' || ch == '_' || ch >= 0x80 && unicode.IsLetter(ch) -} - -// isDigit returns true if the given rune is a decimal digit -func isDigit(ch rune) bool { - return '0' <= ch && ch <= '9' || ch >= 0x80 && unicode.IsDigit(ch) -} - -// isDecimal returns true if the given rune is a decimal number -func isDecimal(ch rune) bool { - return '0' <= ch && ch <= '9' -} - -// isHexadecimal returns true if the given rune is an hexadecimal number -func isHexadecimal(ch rune) bool { - return '0' <= ch && ch <= '9' || 'a' <= ch && ch <= 'f' || 'A' <= ch && ch <= 'F' -} - -// isWhitespace returns true if the rune is a space, tab, newline or carriage return -func isWhitespace(ch rune) bool { - return ch == ' ' || ch == '\t' || ch == '\n' || ch == '\r' -} - -// digitVal returns the integer value of a given octal,decimal or hexadecimal rune -func digitVal(ch rune) int { - switch { - case '0' <= ch && ch <= '9': - return int(ch - '0') - case 'a' <= ch && ch <= 'f': - return int(ch - 'a' + 10) - case 'A' <= ch && ch <= 'F': - return int(ch - 'A' + 10) - } - return 16 // larger than any legal digit val -} diff --git a/vendor/github.com/hashicorp/hcl/hcl/strconv/quote.go b/vendor/github.com/hashicorp/hcl/hcl/strconv/quote.go deleted file mode 100644 index 5f981eaa2f..0000000000 --- a/vendor/github.com/hashicorp/hcl/hcl/strconv/quote.go +++ /dev/null @@ -1,241 +0,0 @@ -package strconv - -import ( - "errors" - "unicode/utf8" -) - -// ErrSyntax indicates that a value does not have the right syntax for the target type. -var ErrSyntax = errors.New("invalid syntax") - -// Unquote interprets s as a single-quoted, double-quoted, -// or backquoted Go string literal, returning the string value -// that s quotes. (If s is single-quoted, it would be a Go -// character literal; Unquote returns the corresponding -// one-character string.) -func Unquote(s string) (t string, err error) { - n := len(s) - if n < 2 { - return "", ErrSyntax - } - quote := s[0] - if quote != s[n-1] { - return "", ErrSyntax - } - s = s[1 : n-1] - - if quote != '"' { - return "", ErrSyntax - } - if !contains(s, '$') && !contains(s, '{') && contains(s, '\n') { - return "", ErrSyntax - } - - // Is it trivial? Avoid allocation. 
- if !contains(s, '\\') && !contains(s, quote) && !contains(s, '$') { - switch quote { - case '"': - return s, nil - case '\'': - r, size := utf8.DecodeRuneInString(s) - if size == len(s) && (r != utf8.RuneError || size != 1) { - return s, nil - } - } - } - - var runeTmp [utf8.UTFMax]byte - buf := make([]byte, 0, 3*len(s)/2) // Try to avoid more allocations. - for len(s) > 0 { - // If we're starting a '${}' then let it through un-unquoted. - // Specifically: we don't unquote any characters within the `${}` - // section. - if s[0] == '$' && len(s) > 1 && s[1] == '{' { - buf = append(buf, '$', '{') - s = s[2:] - - // Continue reading until we find the closing brace, copying as-is - braces := 1 - for len(s) > 0 && braces > 0 { - r, size := utf8.DecodeRuneInString(s) - if r == utf8.RuneError { - return "", ErrSyntax - } - - s = s[size:] - - n := utf8.EncodeRune(runeTmp[:], r) - buf = append(buf, runeTmp[:n]...) - - switch r { - case '{': - braces++ - case '}': - braces-- - } - } - if braces != 0 { - return "", ErrSyntax - } - if len(s) == 0 { - // If there's no string left, we're done! - break - } else { - // If there's more left, we need to pop back up to the top of the loop - // in case there's another interpolation in this string. - continue - } - } - - if s[0] == '\n' { - return "", ErrSyntax - } - - c, multibyte, ss, err := unquoteChar(s, quote) - if err != nil { - return "", err - } - s = ss - if c < utf8.RuneSelf || !multibyte { - buf = append(buf, byte(c)) - } else { - n := utf8.EncodeRune(runeTmp[:], c) - buf = append(buf, runeTmp[:n]...) - } - if quote == '\'' && len(s) != 0 { - // single-quoted must be single character - return "", ErrSyntax - } - } - return string(buf), nil -} - -// contains reports whether the string contains the byte c. -func contains(s string, c byte) bool { - for i := 0; i < len(s); i++ { - if s[i] == c { - return true - } - } - return false -} - -func unhex(b byte) (v rune, ok bool) { - c := rune(b) - switch { - case '0' <= c && c <= '9': - return c - '0', true - case 'a' <= c && c <= 'f': - return c - 'a' + 10, true - case 'A' <= c && c <= 'F': - return c - 'A' + 10, true - } - return -} - -func unquoteChar(s string, quote byte) (value rune, multibyte bool, tail string, err error) { - // easy cases - switch c := s[0]; { - case c == quote && (quote == '\'' || quote == '"'): - err = ErrSyntax - return - case c >= utf8.RuneSelf: - r, size := utf8.DecodeRuneInString(s) - return r, true, s[size:], nil - case c != '\\': - return rune(s[0]), false, s[1:], nil - } - - // hard case: c is backslash - if len(s) <= 1 { - err = ErrSyntax - return - } - c := s[1] - s = s[2:] - - switch c { - case 'a': - value = '\a' - case 'b': - value = '\b' - case 'f': - value = '\f' - case 'n': - value = '\n' - case 'r': - value = '\r' - case 't': - value = '\t' - case 'v': - value = '\v' - case 'x', 'u', 'U': - n := 0 - switch c { - case 'x': - n = 2 - case 'u': - n = 4 - case 'U': - n = 8 - } - var v rune - if len(s) < n { - err = ErrSyntax - return - } - for j := 0; j < n; j++ { - x, ok := unhex(s[j]) - if !ok { - err = ErrSyntax - return - } - v = v<<4 | x - } - s = s[n:] - if c == 'x' { - // single-byte string, possibly not UTF-8 - value = v - break - } - if v > utf8.MaxRune { - err = ErrSyntax - return - } - value = v - multibyte = true - case '0', '1', '2', '3', '4', '5', '6', '7': - v := rune(c) - '0' - if len(s) < 2 { - err = ErrSyntax - return - } - for j := 0; j < 2; j++ { // one digit already; two more - x := rune(s[j]) - '0' - if x < 0 || x > 7 { - err = ErrSyntax - return 
- } - v = (v << 3) | x - } - s = s[2:] - if v > 255 { - err = ErrSyntax - return - } - value = v - case '\\': - value = '\\' - case '\'', '"': - if c != quote { - err = ErrSyntax - return - } - value = rune(c) - default: - err = ErrSyntax - return - } - tail = s - return -} diff --git a/vendor/github.com/hashicorp/hcl/hcl/token/position.go b/vendor/github.com/hashicorp/hcl/hcl/token/position.go deleted file mode 100644 index 59c1bb72d4..0000000000 --- a/vendor/github.com/hashicorp/hcl/hcl/token/position.go +++ /dev/null @@ -1,46 +0,0 @@ -package token - -import "fmt" - -// Pos describes an arbitrary source position -// including the file, line, and column location. -// A Position is valid if the line number is > 0. -type Pos struct { - Filename string // filename, if any - Offset int // offset, starting at 0 - Line int // line number, starting at 1 - Column int // column number, starting at 1 (character count) -} - -// IsValid returns true if the position is valid. -func (p *Pos) IsValid() bool { return p.Line > 0 } - -// String returns a string in one of several forms: -// -// file:line:column valid position with file name -// line:column valid position without file name -// file invalid position with file name -// - invalid position without file name -func (p Pos) String() string { - s := p.Filename - if p.IsValid() { - if s != "" { - s += ":" - } - s += fmt.Sprintf("%d:%d", p.Line, p.Column) - } - if s == "" { - s = "-" - } - return s -} - -// Before reports whether the position p is before u. -func (p Pos) Before(u Pos) bool { - return u.Offset > p.Offset || u.Line > p.Line -} - -// After reports whether the position p is after u. -func (p Pos) After(u Pos) bool { - return u.Offset < p.Offset || u.Line < p.Line -} diff --git a/vendor/github.com/hashicorp/hcl/hcl/token/token.go b/vendor/github.com/hashicorp/hcl/hcl/token/token.go deleted file mode 100644 index e37c0664ec..0000000000 --- a/vendor/github.com/hashicorp/hcl/hcl/token/token.go +++ /dev/null @@ -1,219 +0,0 @@ -// Package token defines constants representing the lexical tokens for HCL -// (HashiCorp Configuration Language) -package token - -import ( - "fmt" - "strconv" - "strings" - - hclstrconv "github.com/hashicorp/hcl/hcl/strconv" -) - -// Token defines a single HCL token which can be obtained via the Scanner -type Token struct { - Type Type - Pos Pos - Text string - JSON bool -} - -// Type is the set of lexical tokens of the HCL (HashiCorp Configuration Language) -type Type int - -const ( - // Special tokens - ILLEGAL Type = iota - EOF - COMMENT - - identifier_beg - IDENT // literals - literal_beg - NUMBER // 12345 - FLOAT // 123.45 - BOOL // true,false - STRING // "abc" - HEREDOC // < 0 { - // Pop the current item - n := len(frontier) - item := frontier[n-1] - frontier = frontier[:n-1] - - switch v := item.Val.(type) { - case *ast.ObjectType: - items, frontier = flattenObjectType(v, item, items, frontier) - case *ast.ListType: - items, frontier = flattenListType(v, item, items, frontier) - default: - items = append(items, item) - } - } - - // Reverse the list since the frontier model runs things backwards - for i := len(items)/2 - 1; i >= 0; i-- { - opp := len(items) - 1 - i - items[i], items[opp] = items[opp], items[i] - } - - // Done! 
Set the original items - list.Items = items - return n, true - }) -} - -func flattenListType( - ot *ast.ListType, - item *ast.ObjectItem, - items []*ast.ObjectItem, - frontier []*ast.ObjectItem) ([]*ast.ObjectItem, []*ast.ObjectItem) { - // If the list is empty, keep the original list - if len(ot.List) == 0 { - items = append(items, item) - return items, frontier - } - - // All the elements of this object must also be objects! - for _, subitem := range ot.List { - if _, ok := subitem.(*ast.ObjectType); !ok { - items = append(items, item) - return items, frontier - } - } - - // Great! We have a match go through all the items and flatten - for _, elem := range ot.List { - // Add it to the frontier so that we can recurse - frontier = append(frontier, &ast.ObjectItem{ - Keys: item.Keys, - Assign: item.Assign, - Val: elem, - LeadComment: item.LeadComment, - LineComment: item.LineComment, - }) - } - - return items, frontier -} - -func flattenObjectType( - ot *ast.ObjectType, - item *ast.ObjectItem, - items []*ast.ObjectItem, - frontier []*ast.ObjectItem) ([]*ast.ObjectItem, []*ast.ObjectItem) { - // If the list has no items we do not have to flatten anything - if ot.List.Items == nil { - items = append(items, item) - return items, frontier - } - - // All the elements of this object must also be objects! - for _, subitem := range ot.List.Items { - if _, ok := subitem.Val.(*ast.ObjectType); !ok { - items = append(items, item) - return items, frontier - } - } - - // Great! We have a match go through all the items and flatten - for _, subitem := range ot.List.Items { - // Copy the new key - keys := make([]*ast.ObjectKey, len(item.Keys)+len(subitem.Keys)) - copy(keys, item.Keys) - copy(keys[len(item.Keys):], subitem.Keys) - - // Add it to the frontier so that we can recurse - frontier = append(frontier, &ast.ObjectItem{ - Keys: keys, - Assign: item.Assign, - Val: subitem.Val, - LeadComment: item.LeadComment, - LineComment: item.LineComment, - }) - } - - return items, frontier -} diff --git a/vendor/github.com/hashicorp/hcl/json/parser/parser.go b/vendor/github.com/hashicorp/hcl/json/parser/parser.go deleted file mode 100644 index 6f46085300..0000000000 --- a/vendor/github.com/hashicorp/hcl/json/parser/parser.go +++ /dev/null @@ -1,313 +0,0 @@ -package parser - -import ( - "errors" - "fmt" - - "github.com/hashicorp/hcl/hcl/ast" - hcltoken "github.com/hashicorp/hcl/hcl/token" - "github.com/hashicorp/hcl/json/scanner" - "github.com/hashicorp/hcl/json/token" -) - -type Parser struct { - sc *scanner.Scanner - - // Last read token - tok token.Token - commaPrev token.Token - - enableTrace bool - indent int - n int // buffer size (max = 1) -} - -func newParser(src []byte) *Parser { - return &Parser{ - sc: scanner.New(src), - } -} - -// Parse returns the fully parsed source and returns the abstract syntax tree. -func Parse(src []byte) (*ast.File, error) { - p := newParser(src) - return p.Parse() -} - -var errEofToken = errors.New("EOF token found") - -// Parse returns the fully parsed source and returns the abstract syntax tree. 
-func (p *Parser) Parse() (*ast.File, error) { - f := &ast.File{} - var err, scerr error - p.sc.Error = func(pos token.Pos, msg string) { - scerr = fmt.Errorf("%s: %s", pos, msg) - } - - // The root must be an object in JSON - object, err := p.object() - if scerr != nil { - return nil, scerr - } - if err != nil { - return nil, err - } - - // We make our final node an object list so it is more HCL compatible - f.Node = object.List - - // Flatten it, which finds patterns and turns them into more HCL-like - // AST trees. - flattenObjects(f.Node) - - return f, nil -} - -func (p *Parser) objectList() (*ast.ObjectList, error) { - defer un(trace(p, "ParseObjectList")) - node := &ast.ObjectList{} - - for { - n, err := p.objectItem() - if err == errEofToken { - break // we are finished - } - - // we don't return a nil node, because might want to use already - // collected items. - if err != nil { - return node, err - } - - node.Add(n) - - // Check for a followup comma. If it isn't a comma, then we're done - if tok := p.scan(); tok.Type != token.COMMA { - break - } - } - - return node, nil -} - -// objectItem parses a single object item -func (p *Parser) objectItem() (*ast.ObjectItem, error) { - defer un(trace(p, "ParseObjectItem")) - - keys, err := p.objectKey() - if err != nil { - return nil, err - } - - o := &ast.ObjectItem{ - Keys: keys, - } - - switch p.tok.Type { - case token.COLON: - pos := p.tok.Pos - o.Assign = hcltoken.Pos{ - Filename: pos.Filename, - Offset: pos.Offset, - Line: pos.Line, - Column: pos.Column, - } - - o.Val, err = p.objectValue() - if err != nil { - return nil, err - } - } - - return o, nil -} - -// objectKey parses an object key and returns a ObjectKey AST -func (p *Parser) objectKey() ([]*ast.ObjectKey, error) { - keyCount := 0 - keys := make([]*ast.ObjectKey, 0) - - for { - tok := p.scan() - switch tok.Type { - case token.EOF: - return nil, errEofToken - case token.STRING: - keyCount++ - keys = append(keys, &ast.ObjectKey{ - Token: p.tok.HCLToken(), - }) - case token.COLON: - // If we have a zero keycount it means that we never got - // an object key, i.e. `{ :`. This is a syntax error. - if keyCount == 0 { - return nil, fmt.Errorf("expected: STRING got: %s", p.tok.Type) - } - - // Done - return keys, nil - case token.ILLEGAL: - fmt.Println("illegal") - default: - return nil, fmt.Errorf("expected: STRING got: %s", p.tok.Type) - } - } -} - -// object parses any type of object, such as number, bool, string, object or -// list. -func (p *Parser) objectValue() (ast.Node, error) { - defer un(trace(p, "ParseObjectValue")) - tok := p.scan() - - switch tok.Type { - case token.NUMBER, token.FLOAT, token.BOOL, token.NULL, token.STRING: - return p.literalType() - case token.LBRACE: - return p.objectType() - case token.LBRACK: - return p.listType() - case token.EOF: - return nil, errEofToken - } - - return nil, fmt.Errorf("Expected object value, got unknown token: %+v", tok) -} - -// object parses any type of object, such as number, bool, string, object or -// list. 
-func (p *Parser) object() (*ast.ObjectType, error) { - defer un(trace(p, "ParseType")) - tok := p.scan() - - switch tok.Type { - case token.LBRACE: - return p.objectType() - case token.EOF: - return nil, errEofToken - } - - return nil, fmt.Errorf("Expected object, got unknown token: %+v", tok) -} - -// objectType parses an object type and returns a ObjectType AST -func (p *Parser) objectType() (*ast.ObjectType, error) { - defer un(trace(p, "ParseObjectType")) - - // we assume that the currently scanned token is a LBRACE - o := &ast.ObjectType{} - - l, err := p.objectList() - - // if we hit RBRACE, we are good to go (means we parsed all Items), if it's - // not a RBRACE, it's an syntax error and we just return it. - if err != nil && p.tok.Type != token.RBRACE { - return nil, err - } - - o.List = l - return o, nil -} - -// listType parses a list type and returns a ListType AST -func (p *Parser) listType() (*ast.ListType, error) { - defer un(trace(p, "ParseListType")) - - // we assume that the currently scanned token is a LBRACK - l := &ast.ListType{} - - for { - tok := p.scan() - switch tok.Type { - case token.NUMBER, token.FLOAT, token.STRING: - node, err := p.literalType() - if err != nil { - return nil, err - } - - l.Add(node) - case token.COMMA: - continue - case token.LBRACE: - node, err := p.objectType() - if err != nil { - return nil, err - } - - l.Add(node) - case token.BOOL: - // TODO(arslan) should we support? not supported by HCL yet - case token.LBRACK: - // TODO(arslan) should we support nested lists? Even though it's - // written in README of HCL, it's not a part of the grammar - // (not defined in parse.y) - case token.RBRACK: - // finished - return l, nil - default: - return nil, fmt.Errorf("unexpected token while parsing list: %s", tok.Type) - } - - } -} - -// literalType parses a literal type and returns a LiteralType AST -func (p *Parser) literalType() (*ast.LiteralType, error) { - defer un(trace(p, "ParseLiteral")) - - return &ast.LiteralType{ - Token: p.tok.HCLToken(), - }, nil -} - -// scan returns the next token from the underlying scanner. If a token has -// been unscanned then read that instead. -func (p *Parser) scan() token.Token { - // If we have a token on the buffer, then return it. - if p.n != 0 { - p.n = 0 - return p.tok - } - - p.tok = p.sc.Scan() - return p.tok -} - -// unscan pushes the previously read token back onto the buffer. -func (p *Parser) unscan() { - p.n = 1 -} - -// ---------------------------------------------------------------------------- -// Parsing support - -func (p *Parser) printTrace(a ...interface{}) { - if !p.enableTrace { - return - } - - const dots = ". . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . " - const n = len(dots) - fmt.Printf("%5d:%3d: ", p.tok.Pos.Line, p.tok.Pos.Column) - - i := 2 * p.indent - for i > n { - fmt.Print(dots) - i -= n - } - // i <= n - fmt.Print(dots[0:i]) - fmt.Println(a...) 
-} - -func trace(p *Parser, msg string) *Parser { - p.printTrace(msg, "(") - p.indent++ - return p -} - -// Usage pattern: defer un(trace(p, "...")) -func un(p *Parser) { - p.indent-- - p.printTrace(")") -} diff --git a/vendor/github.com/hashicorp/hcl/json/scanner/scanner.go b/vendor/github.com/hashicorp/hcl/json/scanner/scanner.go deleted file mode 100644 index dd5c72bb3d..0000000000 --- a/vendor/github.com/hashicorp/hcl/json/scanner/scanner.go +++ /dev/null @@ -1,451 +0,0 @@ -package scanner - -import ( - "bytes" - "fmt" - "os" - "unicode" - "unicode/utf8" - - "github.com/hashicorp/hcl/json/token" -) - -// eof represents a marker rune for the end of the reader. -const eof = rune(0) - -// Scanner defines a lexical scanner -type Scanner struct { - buf *bytes.Buffer // Source buffer for advancing and scanning - src []byte // Source buffer for immutable access - - // Source Position - srcPos token.Pos // current position - prevPos token.Pos // previous position, used for peek() method - - lastCharLen int // length of last character in bytes - lastLineLen int // length of last line in characters (for correct column reporting) - - tokStart int // token text start position - tokEnd int // token text end position - - // Error is called for each error encountered. If no Error - // function is set, the error is reported to os.Stderr. - Error func(pos token.Pos, msg string) - - // ErrorCount is incremented by one for each error encountered. - ErrorCount int - - // tokPos is the start position of most recently scanned token; set by - // Scan. The Filename field is always left untouched by the Scanner. If - // an error is reported (via Error) and Position is invalid, the scanner is - // not inside a token. - tokPos token.Pos -} - -// New creates and initializes a new instance of Scanner using src as -// its source content. -func New(src []byte) *Scanner { - // even though we accept a src, we read from a io.Reader compatible type - // (*bytes.Buffer). So in the future we might easily change it to streaming - // read. - b := bytes.NewBuffer(src) - s := &Scanner{ - buf: b, - src: src, - } - - // srcPosition always starts with 1 - s.srcPos.Line = 1 - return s -} - -// next reads the next rune from the bufferred reader. Returns the rune(0) if -// an error occurs (or io.EOF is returned). -func (s *Scanner) next() rune { - ch, size, err := s.buf.ReadRune() - if err != nil { - // advance for error reporting - s.srcPos.Column++ - s.srcPos.Offset += size - s.lastCharLen = size - return eof - } - - if ch == utf8.RuneError && size == 1 { - s.srcPos.Column++ - s.srcPos.Offset += size - s.lastCharLen = size - s.err("illegal UTF-8 encoding") - return ch - } - - // remember last position - s.prevPos = s.srcPos - - s.srcPos.Column++ - s.lastCharLen = size - s.srcPos.Offset += size - - if ch == '\n' { - s.srcPos.Line++ - s.lastLineLen = s.srcPos.Column - s.srcPos.Column = 0 - } - - // debug - // fmt.Printf("ch: %q, offset:column: %d:%d\n", ch, s.srcPos.Offset, s.srcPos.Column) - return ch -} - -// unread unreads the previous read Rune and updates the source position -func (s *Scanner) unread() { - if err := s.buf.UnreadRune(); err != nil { - panic(err) // this is user fault, we should catch it - } - s.srcPos = s.prevPos // put back last position -} - -// peek returns the next rune without advancing the reader. -func (s *Scanner) peek() rune { - peek, _, err := s.buf.ReadRune() - if err != nil { - return eof - } - - s.buf.UnreadRune() - return peek -} - -// Scan scans the next token and returns the token. 
-func (s *Scanner) Scan() token.Token { - ch := s.next() - - // skip white space - for isWhitespace(ch) { - ch = s.next() - } - - var tok token.Type - - // token text markings - s.tokStart = s.srcPos.Offset - s.lastCharLen - - // token position, initial next() is moving the offset by one(size of rune - // actually), though we are interested with the starting point - s.tokPos.Offset = s.srcPos.Offset - s.lastCharLen - if s.srcPos.Column > 0 { - // common case: last character was not a '\n' - s.tokPos.Line = s.srcPos.Line - s.tokPos.Column = s.srcPos.Column - } else { - // last character was a '\n' - // (we cannot be at the beginning of the source - // since we have called next() at least once) - s.tokPos.Line = s.srcPos.Line - 1 - s.tokPos.Column = s.lastLineLen - } - - switch { - case isLetter(ch): - lit := s.scanIdentifier() - if lit == "true" || lit == "false" { - tok = token.BOOL - } else if lit == "null" { - tok = token.NULL - } else { - s.err("illegal char") - } - case isDecimal(ch): - tok = s.scanNumber(ch) - default: - switch ch { - case eof: - tok = token.EOF - case '"': - tok = token.STRING - s.scanString() - case '.': - tok = token.PERIOD - ch = s.peek() - if isDecimal(ch) { - tok = token.FLOAT - ch = s.scanMantissa(ch) - ch = s.scanExponent(ch) - } - case '[': - tok = token.LBRACK - case ']': - tok = token.RBRACK - case '{': - tok = token.LBRACE - case '}': - tok = token.RBRACE - case ',': - tok = token.COMMA - case ':': - tok = token.COLON - case '-': - if isDecimal(s.peek()) { - ch := s.next() - tok = s.scanNumber(ch) - } else { - s.err("illegal char") - } - default: - s.err("illegal char: " + string(ch)) - } - } - - // finish token ending - s.tokEnd = s.srcPos.Offset - - // create token literal - var tokenText string - if s.tokStart >= 0 { - tokenText = string(s.src[s.tokStart:s.tokEnd]) - } - s.tokStart = s.tokEnd // ensure idempotency of tokenText() call - - return token.Token{ - Type: tok, - Pos: s.tokPos, - Text: tokenText, - } -} - -// scanNumber scans a HCL number definition starting with the given rune -func (s *Scanner) scanNumber(ch rune) token.Type { - zero := ch == '0' - pos := s.srcPos - - s.scanMantissa(ch) - ch = s.next() // seek forward - if ch == 'e' || ch == 'E' { - ch = s.scanExponent(ch) - return token.FLOAT - } - - if ch == '.' { - ch = s.scanFraction(ch) - if ch == 'e' || ch == 'E' { - ch = s.next() - ch = s.scanExponent(ch) - } - return token.FLOAT - } - - if ch != eof { - s.unread() - } - - // If we have a larger number and this is zero, error - if zero && pos != s.srcPos { - s.err("numbers cannot start with 0") - } - - return token.NUMBER -} - -// scanMantissa scans the mantissa begining from the rune. It returns the next -// non decimal rune. It's used to determine wheter it's a fraction or exponent. -func (s *Scanner) scanMantissa(ch rune) rune { - scanned := false - for isDecimal(ch) { - ch = s.next() - scanned = true - } - - if scanned && ch != eof { - s.unread() - } - return ch -} - -// scanFraction scans the fraction after the '.' rune -func (s *Scanner) scanFraction(ch rune) rune { - if ch == '.' { - ch = s.peek() // we peek just to see if we can move forward - ch = s.scanMantissa(ch) - } - return ch -} - -// scanExponent scans the remaining parts of an exponent after the 'e' or 'E' -// rune. 
-func (s *Scanner) scanExponent(ch rune) rune { - if ch == 'e' || ch == 'E' { - ch = s.next() - if ch == '-' || ch == '+' { - ch = s.next() - } - ch = s.scanMantissa(ch) - } - return ch -} - -// scanString scans a quoted string -func (s *Scanner) scanString() { - braces := 0 - for { - // '"' opening already consumed - // read character after quote - ch := s.next() - - if ch == '\n' || ch < 0 || ch == eof { - s.err("literal not terminated") - return - } - - if ch == '"' { - break - } - - // If we're going into a ${} then we can ignore quotes for awhile - if braces == 0 && ch == '$' && s.peek() == '{' { - braces++ - s.next() - } else if braces > 0 && ch == '{' { - braces++ - } - if braces > 0 && ch == '}' { - braces-- - } - - if ch == '\\' { - s.scanEscape() - } - } - - return -} - -// scanEscape scans an escape sequence -func (s *Scanner) scanEscape() rune { - // http://en.cppreference.com/w/cpp/language/escape - ch := s.next() // read character after '/' - switch ch { - case 'a', 'b', 'f', 'n', 'r', 't', 'v', '\\', '"': - // nothing to do - case '0', '1', '2', '3', '4', '5', '6', '7': - // octal notation - ch = s.scanDigits(ch, 8, 3) - case 'x': - // hexademical notation - ch = s.scanDigits(s.next(), 16, 2) - case 'u': - // universal character name - ch = s.scanDigits(s.next(), 16, 4) - case 'U': - // universal character name - ch = s.scanDigits(s.next(), 16, 8) - default: - s.err("illegal char escape") - } - return ch -} - -// scanDigits scans a rune with the given base for n times. For example an -// octal notation \184 would yield in scanDigits(ch, 8, 3) -func (s *Scanner) scanDigits(ch rune, base, n int) rune { - for n > 0 && digitVal(ch) < base { - ch = s.next() - n-- - } - if n > 0 { - s.err("illegal char escape") - } - - // we scanned all digits, put the last non digit char back - s.unread() - return ch -} - -// scanIdentifier scans an identifier and returns the literal string -func (s *Scanner) scanIdentifier() string { - offs := s.srcPos.Offset - s.lastCharLen - ch := s.next() - for isLetter(ch) || isDigit(ch) || ch == '-' { - ch = s.next() - } - - if ch != eof { - s.unread() // we got identifier, put back latest char - } - - return string(s.src[offs:s.srcPos.Offset]) -} - -// recentPosition returns the position of the character immediately after the -// character or token returned by the last call to Scan. -func (s *Scanner) recentPosition() (pos token.Pos) { - pos.Offset = s.srcPos.Offset - s.lastCharLen - switch { - case s.srcPos.Column > 0: - // common case: last character was not a '\n' - pos.Line = s.srcPos.Line - pos.Column = s.srcPos.Column - case s.lastLineLen > 0: - // last character was a '\n' - // (we cannot be at the beginning of the source - // since we have called next() at least once) - pos.Line = s.srcPos.Line - 1 - pos.Column = s.lastLineLen - default: - // at the beginning of the source - pos.Line = 1 - pos.Column = 1 - } - return -} - -// err prints the error of any scanning to s.Error function. 
If the function is -// not defined, by default it prints them to os.Stderr -func (s *Scanner) err(msg string) { - s.ErrorCount++ - pos := s.recentPosition() - - if s.Error != nil { - s.Error(pos, msg) - return - } - - fmt.Fprintf(os.Stderr, "%s: %s\n", pos, msg) -} - -// isHexadecimal returns true if the given rune is a letter -func isLetter(ch rune) bool { - return 'a' <= ch && ch <= 'z' || 'A' <= ch && ch <= 'Z' || ch == '_' || ch >= 0x80 && unicode.IsLetter(ch) -} - -// isHexadecimal returns true if the given rune is a decimal digit -func isDigit(ch rune) bool { - return '0' <= ch && ch <= '9' || ch >= 0x80 && unicode.IsDigit(ch) -} - -// isHexadecimal returns true if the given rune is a decimal number -func isDecimal(ch rune) bool { - return '0' <= ch && ch <= '9' -} - -// isHexadecimal returns true if the given rune is an hexadecimal number -func isHexadecimal(ch rune) bool { - return '0' <= ch && ch <= '9' || 'a' <= ch && ch <= 'f' || 'A' <= ch && ch <= 'F' -} - -// isWhitespace returns true if the rune is a space, tab, newline or carriage return -func isWhitespace(ch rune) bool { - return ch == ' ' || ch == '\t' || ch == '\n' || ch == '\r' -} - -// digitVal returns the integer value of a given octal,decimal or hexadecimal rune -func digitVal(ch rune) int { - switch { - case '0' <= ch && ch <= '9': - return int(ch - '0') - case 'a' <= ch && ch <= 'f': - return int(ch - 'a' + 10) - case 'A' <= ch && ch <= 'F': - return int(ch - 'A' + 10) - } - return 16 // larger than any legal digit val -} diff --git a/vendor/github.com/hashicorp/hcl/json/token/position.go b/vendor/github.com/hashicorp/hcl/json/token/position.go deleted file mode 100644 index 59c1bb72d4..0000000000 --- a/vendor/github.com/hashicorp/hcl/json/token/position.go +++ /dev/null @@ -1,46 +0,0 @@ -package token - -import "fmt" - -// Pos describes an arbitrary source position -// including the file, line, and column location. -// A Position is valid if the line number is > 0. -type Pos struct { - Filename string // filename, if any - Offset int // offset, starting at 0 - Line int // line number, starting at 1 - Column int // column number, starting at 1 (character count) -} - -// IsValid returns true if the position is valid. -func (p *Pos) IsValid() bool { return p.Line > 0 } - -// String returns a string in one of several forms: -// -// file:line:column valid position with file name -// line:column valid position without file name -// file invalid position with file name -// - invalid position without file name -func (p Pos) String() string { - s := p.Filename - if p.IsValid() { - if s != "" { - s += ":" - } - s += fmt.Sprintf("%d:%d", p.Line, p.Column) - } - if s == "" { - s = "-" - } - return s -} - -// Before reports whether the position p is before u. -func (p Pos) Before(u Pos) bool { - return u.Offset > p.Offset || u.Line > p.Line -} - -// After reports whether the position p is after u. 
-func (p Pos) After(u Pos) bool { - return u.Offset < p.Offset || u.Line < p.Line -} diff --git a/vendor/github.com/hashicorp/hcl/json/token/token.go b/vendor/github.com/hashicorp/hcl/json/token/token.go deleted file mode 100644 index 95a0c3eee6..0000000000 --- a/vendor/github.com/hashicorp/hcl/json/token/token.go +++ /dev/null @@ -1,118 +0,0 @@ -package token - -import ( - "fmt" - "strconv" - - hcltoken "github.com/hashicorp/hcl/hcl/token" -) - -// Token defines a single HCL token which can be obtained via the Scanner -type Token struct { - Type Type - Pos Pos - Text string -} - -// Type is the set of lexical tokens of the HCL (HashiCorp Configuration Language) -type Type int - -const ( - // Special tokens - ILLEGAL Type = iota - EOF - - identifier_beg - literal_beg - NUMBER // 12345 - FLOAT // 123.45 - BOOL // true,false - STRING // "abc" - NULL // null - literal_end - identifier_end - - operator_beg - LBRACK // [ - LBRACE // { - COMMA // , - PERIOD // . - COLON // : - - RBRACK // ] - RBRACE // } - - operator_end -) - -var tokens = [...]string{ - ILLEGAL: "ILLEGAL", - - EOF: "EOF", - - NUMBER: "NUMBER", - FLOAT: "FLOAT", - BOOL: "BOOL", - STRING: "STRING", - NULL: "NULL", - - LBRACK: "LBRACK", - LBRACE: "LBRACE", - COMMA: "COMMA", - PERIOD: "PERIOD", - COLON: "COLON", - - RBRACK: "RBRACK", - RBRACE: "RBRACE", -} - -// String returns the string corresponding to the token tok. -func (t Type) String() string { - s := "" - if 0 <= t && t < Type(len(tokens)) { - s = tokens[t] - } - if s == "" { - s = "token(" + strconv.Itoa(int(t)) + ")" - } - return s -} - -// IsIdentifier returns true for tokens corresponding to identifiers and basic -// type literals; it returns false otherwise. -func (t Type) IsIdentifier() bool { return identifier_beg < t && t < identifier_end } - -// IsLiteral returns true for tokens corresponding to basic type literals; it -// returns false otherwise. -func (t Type) IsLiteral() bool { return literal_beg < t && t < literal_end } - -// IsOperator returns true for tokens corresponding to operators and -// delimiters; it returns false otherwise. -func (t Type) IsOperator() bool { return operator_beg < t && t < operator_end } - -// String returns the token's literal text. Note that this is only -// applicable for certain token types, such as token.IDENT, -// token.STRING, etc.. -func (t Token) String() string { - return fmt.Sprintf("%s %s %s", t.Pos.String(), t.Type.String(), t.Text) -} - -// HCLToken converts this token to an HCL token. -// -// The token type must be a literal type or this will panic. 
-func (t Token) HCLToken() hcltoken.Token { - switch t.Type { - case BOOL: - return hcltoken.Token{Type: hcltoken.BOOL, Text: t.Text} - case FLOAT: - return hcltoken.Token{Type: hcltoken.FLOAT, Text: t.Text} - case NULL: - return hcltoken.Token{Type: hcltoken.STRING, Text: ""} - case NUMBER: - return hcltoken.Token{Type: hcltoken.NUMBER, Text: t.Text} - case STRING: - return hcltoken.Token{Type: hcltoken.STRING, Text: t.Text, JSON: true} - default: - panic(fmt.Sprintf("unimplemented HCLToken for type: %s", t.Type)) - } -} diff --git a/vendor/github.com/hashicorp/hcl/lex.go b/vendor/github.com/hashicorp/hcl/lex.go deleted file mode 100644 index d9993c2928..0000000000 --- a/vendor/github.com/hashicorp/hcl/lex.go +++ /dev/null @@ -1,38 +0,0 @@ -package hcl - -import ( - "unicode" - "unicode/utf8" -) - -type lexModeValue byte - -const ( - lexModeUnknown lexModeValue = iota - lexModeHcl - lexModeJson -) - -// lexMode returns whether we're going to be parsing in JSON -// mode or HCL mode. -func lexMode(v []byte) lexModeValue { - var ( - r rune - w int - offset int - ) - - for { - r, w = utf8.DecodeRune(v[offset:]) - offset += w - if unicode.IsSpace(r) { - continue - } - if r == '{' { - return lexModeJson - } - break - } - - return lexModeHcl -} diff --git a/vendor/github.com/hashicorp/hcl/parse.go b/vendor/github.com/hashicorp/hcl/parse.go deleted file mode 100644 index 1fca53c4ce..0000000000 --- a/vendor/github.com/hashicorp/hcl/parse.go +++ /dev/null @@ -1,39 +0,0 @@ -package hcl - -import ( - "fmt" - - "github.com/hashicorp/hcl/hcl/ast" - hclParser "github.com/hashicorp/hcl/hcl/parser" - jsonParser "github.com/hashicorp/hcl/json/parser" -) - -// ParseBytes accepts as input byte slice and returns ast tree. -// -// Input can be either JSON or HCL -func ParseBytes(in []byte) (*ast.File, error) { - return parse(in) -} - -// ParseString accepts input as a string and returns ast tree. -func ParseString(input string) (*ast.File, error) { - return parse([]byte(input)) -} - -func parse(in []byte) (*ast.File, error) { - switch lexMode(in) { - case lexModeHcl: - return hclParser.Parse(in) - case lexModeJson: - return jsonParser.Parse(in) - } - - return nil, fmt.Errorf("unknown config format") -} - -// Parse parses the given input and returns the root object. -// -// The input format can be either HCL or JSON. -func Parse(input string) (*ast.File, error) { - return parse([]byte(input)) -} diff --git a/vendor/github.com/inconshreveable/mousetrap/LICENSE b/vendor/github.com/inconshreveable/mousetrap/LICENSE deleted file mode 100644 index 5f0d1fb6a7..0000000000 --- a/vendor/github.com/inconshreveable/mousetrap/LICENSE +++ /dev/null @@ -1,13 +0,0 @@ -Copyright 2014 Alan Shreve - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
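For context on the vendored API being dropped just above: the deleted parse.go documents a single entry point, `hcl.Parse` (with `ParseBytes`/`ParseString` variants), that accepts either HCL or JSON input and returns an `*ast.File`. The following is only a minimal usage sketch, not part of the patch; it assumes the vendored `github.com/hashicorp/hcl` package is still importable, and the `host`/`port` keys are made up purely for illustration.

```go
package main

import (
	"fmt"
	"log"

	"github.com/hashicorp/hcl"
)

func main() {
	// Plain HCL input: lexMode sees a non-'{' first rune and picks the HCL parser.
	hclSrc := `host = "localhost"
port = 9093`
	f, err := hcl.Parse(hclSrc)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("HCL root node: %T\n", f.Node)

	// JSON input: a leading '{' makes lexMode choose the JSON parser,
	// so the same entry point accepts JSON documents as well.
	jsonSrc := `{"host": "localhost", "port": 9093}`
	f, err = hcl.Parse(jsonSrc)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("JSON root node: %T\n", f.Node)
}
```

The format switch is exactly the `lexMode` helper deleted above: if the first non-whitespace rune is `{`, the input is parsed as JSON, otherwise as HCL.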
diff --git a/vendor/github.com/inconshreveable/mousetrap/README.md b/vendor/github.com/inconshreveable/mousetrap/README.md deleted file mode 100644 index 7a950d1774..0000000000 --- a/vendor/github.com/inconshreveable/mousetrap/README.md +++ /dev/null @@ -1,23 +0,0 @@ -# mousetrap - -mousetrap is a tiny library that answers a single question. - -On a Windows machine, was the process invoked by someone double clicking on -the executable file while browsing in explorer? - -### Motivation - -Windows developers unfamiliar with command line tools will often "double-click" -the executable for a tool. Because most CLI tools print the help and then exit -when invoked without arguments, this is often very frustrating for those users. - -mousetrap provides a way to detect these invocations so that you can provide -more helpful behavior and instructions on how to run the CLI tool. To see what -this looks like, both from an organizational and a technical perspective, see -https://inconshreveable.com/09-09-2014/sweat-the-small-stuff/ - -### The interface - -The library exposes a single interface: - - func StartedByExplorer() (bool) diff --git a/vendor/github.com/inconshreveable/mousetrap/trap_others.go b/vendor/github.com/inconshreveable/mousetrap/trap_others.go deleted file mode 100644 index 9d2d8a4bab..0000000000 --- a/vendor/github.com/inconshreveable/mousetrap/trap_others.go +++ /dev/null @@ -1,15 +0,0 @@ -// +build !windows - -package mousetrap - -// StartedByExplorer returns true if the program was invoked by the user -// double-clicking on the executable from explorer.exe -// -// It is conservative and returns false if any of the internal calls fail. -// It does not guarantee that the program was run from a terminal. It only can tell you -// whether it was launched from explorer.exe -// -// On non-Windows platforms, it always returns false. 
-func StartedByExplorer() bool { - return false -} diff --git a/vendor/github.com/inconshreveable/mousetrap/trap_windows.go b/vendor/github.com/inconshreveable/mousetrap/trap_windows.go deleted file mode 100644 index 336142a5e3..0000000000 --- a/vendor/github.com/inconshreveable/mousetrap/trap_windows.go +++ /dev/null @@ -1,98 +0,0 @@ -// +build windows -// +build !go1.4 - -package mousetrap - -import ( - "fmt" - "os" - "syscall" - "unsafe" -) - -const ( - // defined by the Win32 API - th32cs_snapprocess uintptr = 0x2 -) - -var ( - kernel = syscall.MustLoadDLL("kernel32.dll") - CreateToolhelp32Snapshot = kernel.MustFindProc("CreateToolhelp32Snapshot") - Process32First = kernel.MustFindProc("Process32FirstW") - Process32Next = kernel.MustFindProc("Process32NextW") -) - -// ProcessEntry32 structure defined by the Win32 API -type processEntry32 struct { - dwSize uint32 - cntUsage uint32 - th32ProcessID uint32 - th32DefaultHeapID int - th32ModuleID uint32 - cntThreads uint32 - th32ParentProcessID uint32 - pcPriClassBase int32 - dwFlags uint32 - szExeFile [syscall.MAX_PATH]uint16 -} - -func getProcessEntry(pid int) (pe *processEntry32, err error) { - snapshot, _, e1 := CreateToolhelp32Snapshot.Call(th32cs_snapprocess, uintptr(0)) - if snapshot == uintptr(syscall.InvalidHandle) { - err = fmt.Errorf("CreateToolhelp32Snapshot: %v", e1) - return - } - defer syscall.CloseHandle(syscall.Handle(snapshot)) - - var processEntry processEntry32 - processEntry.dwSize = uint32(unsafe.Sizeof(processEntry)) - ok, _, e1 := Process32First.Call(snapshot, uintptr(unsafe.Pointer(&processEntry))) - if ok == 0 { - err = fmt.Errorf("Process32First: %v", e1) - return - } - - for { - if processEntry.th32ProcessID == uint32(pid) { - pe = &processEntry - return - } - - ok, _, e1 = Process32Next.Call(snapshot, uintptr(unsafe.Pointer(&processEntry))) - if ok == 0 { - err = fmt.Errorf("Process32Next: %v", e1) - return - } - } -} - -func getppid() (pid int, err error) { - pe, err := getProcessEntry(os.Getpid()) - if err != nil { - return - } - - pid = int(pe.th32ParentProcessID) - return -} - -// StartedByExplorer returns true if the program was invoked by the user double-clicking -// on the executable from explorer.exe -// -// It is conservative and returns false if any of the internal calls fail. -// It does not guarantee that the program was run from a terminal. 
It only can tell you -// whether it was launched from explorer.exe -func StartedByExplorer() bool { - ppid, err := getppid() - if err != nil { - return false - } - - pe, err := getProcessEntry(ppid) - if err != nil { - return false - } - - name := syscall.UTF16ToString(pe.szExeFile[:]) - return name == "explorer.exe" -} diff --git a/vendor/github.com/inconshreveable/mousetrap/trap_windows_1.4.go b/vendor/github.com/inconshreveable/mousetrap/trap_windows_1.4.go deleted file mode 100644 index 9a28e57c3c..0000000000 --- a/vendor/github.com/inconshreveable/mousetrap/trap_windows_1.4.go +++ /dev/null @@ -1,46 +0,0 @@ -// +build windows -// +build go1.4 - -package mousetrap - -import ( - "os" - "syscall" - "unsafe" -) - -func getProcessEntry(pid int) (*syscall.ProcessEntry32, error) { - snapshot, err := syscall.CreateToolhelp32Snapshot(syscall.TH32CS_SNAPPROCESS, 0) - if err != nil { - return nil, err - } - defer syscall.CloseHandle(snapshot) - var procEntry syscall.ProcessEntry32 - procEntry.Size = uint32(unsafe.Sizeof(procEntry)) - if err = syscall.Process32First(snapshot, &procEntry); err != nil { - return nil, err - } - for { - if procEntry.ProcessID == uint32(pid) { - return &procEntry, nil - } - err = syscall.Process32Next(snapshot, &procEntry) - if err != nil { - return nil, err - } - } -} - -// StartedByExplorer returns true if the program was invoked by the user double-clicking -// on the executable from explorer.exe -// -// It is conservative and returns false if any of the internal calls fail. -// It does not guarantee that the program was run from a terminal. It only can tell you -// whether it was launched from explorer.exe -func StartedByExplorer() bool { - pe, err := getProcessEntry(os.Getppid()) - if err != nil { - return false - } - return "explorer.exe" == syscall.UTF16ToString(pe.ExeFile[:]) -} diff --git a/vendor/github.com/magiconair/properties/CHANGELOG.md b/vendor/github.com/magiconair/properties/CHANGELOG.md deleted file mode 100644 index 89ee9381ef..0000000000 --- a/vendor/github.com/magiconair/properties/CHANGELOG.md +++ /dev/null @@ -1,90 +0,0 @@ -## Changelog - -### Unreleased - - * [Issue #15](https://github.com/magiconair/properties/issues/15): Drop gocheck dependency - -### [1.7.1](https://github.com/magiconair/properties/tags/v1.7.1) - 13 Jan 2017 - - * [PR #16](https://github.com/magiconair/properties/pull/16): Keep gofmt happy - * [PR #18](https://github.com/magiconair/properties/pull/18): Fix Delete() function - -### [1.7.0](https://github.com/magiconair/properties/tags/v1.7.0) - 20 Mar 2016 - - * [Issue #10](https://github.com/magiconair/properties/issues/10): Add [LoadURL,LoadURLs,MustLoadURL,MustLoadURLs](http://godoc.org/github.com/magiconair/properties#Properties.LoadURL) method to load properties from a URL. - * [Issue #11](https://github.com/magiconair/properties/issues/11): Add [LoadString,MustLoadString](http://godoc.org/github.com/magiconair/properties#Properties.LoadString) method to load properties from an UTF8 string. - * [PR #8](https://github.com/magiconair/properties/pull/8): Add [MustFlag](http://godoc.org/github.com/magiconair/properties#Properties.MustFlag) method to provide overrides via command line flags. (@pascaldekloe) - -### [1.6.0](https://github.com/magiconair/properties/tags/v1.6.0) - 11 Dec 2015 - - * Add [Decode](http://godoc.org/github.com/magiconair/properties#Properties.Decode) method to populate struct from properties via tags. 
- -### [1.5.6](https://github.com/magiconair/properties/tags/v1.5.6) - 18 Oct 2015 - - * Vendored in gopkg.in/check.v1 - -### [1.5.5](https://github.com/magiconair/properties/tags/v1.5.5) - 31 Jul 2015 - - * [PR #6](https://github.com/magiconair/properties/pull/6): Add [Delete](http://godoc.org/github.com/magiconair/properties#Properties.Delete) method to remove keys including comments. (@gerbenjacobs) - -### [1.5.4](https://github.com/magiconair/properties/tags/v1.5.4) - 23 Jun 2015 - - * [Issue #5](https://github.com/magiconair/properties/issues/5): Allow disabling of property expansion [DisableExpansion](http://godoc.org/github.com/magiconair/properties#Properties.DisableExpansion). When property expansion is disabled Properties become a simple key/value store and don't check for circular references. - -### [1.5.3](https://github.com/magiconair/properties/tags/v1.5.3) - 02 Jun 2015 - - * [Issue #4](https://github.com/magiconair/properties/issues/4): Maintain key order in [Filter()](http://godoc.org/github.com/magiconair/properties#Properties.Filter), [FilterPrefix()](http://godoc.org/github.com/magiconair/properties#Properties.FilterPrefix) and [FilterRegexp()](http://godoc.org/github.com/magiconair/properties#Properties.FilterRegexp) - -### [1.5.2](https://github.com/magiconair/properties/tags/v1.5.2) - 10 Apr 2015 - - * [Issue #3](https://github.com/magiconair/properties/issues/3): Don't print comments in [WriteComment()](http://godoc.org/github.com/magiconair/properties#Properties.WriteComment) if they are all empty - * Add clickable links to README - -### [1.5.1](https://github.com/magiconair/properties/tags/v1.5.1) - 08 Dec 2014 - - * Added [GetParsedDuration()](http://godoc.org/github.com/magiconair/properties#Properties.GetParsedDuration) and [MustGetParsedDuration()](http://godoc.org/github.com/magiconair/properties#Properties.MustGetParsedDuration) for values specified compatible with - [time.ParseDuration()](http://golang.org/pkg/time/#ParseDuration). 
- -### [1.5.0](https://github.com/magiconair/properties/tags/v1.5.0) - 18 Nov 2014 - - * Added support for single and multi-line comments (reading, writing and updating) - * The order of keys is now preserved - * Calling [Set()](http://godoc.org/github.com/magiconair/properties#Properties.Set) with an empty key now silently ignores the call and does not create a new entry - * Added a [MustSet()](http://godoc.org/github.com/magiconair/properties#Properties.MustSet) method - * Migrated test library from launchpad.net/gocheck to [gopkg.in/check.v1](http://gopkg.in/check.v1) - -### [1.4.2](https://github.com/magiconair/properties/tags/v1.4.2) - 15 Nov 2014 - - * [Issue #2](https://github.com/magiconair/properties/issues/2): Fixed goroutine leak in parser which created two lexers but cleaned up only one - -### [1.4.1](https://github.com/magiconair/properties/tags/v1.4.1) - 13 Nov 2014 - - * [Issue #1](https://github.com/magiconair/properties/issues/1): Fixed bug in Keys() method which returned an empty string - -### [1.4.0](https://github.com/magiconair/properties/tags/v1.4.0) - 23 Sep 2014 - - * Added [Keys()](http://godoc.org/github.com/magiconair/properties#Properties.Keys) to get the keys - * Added [Filter()](http://godoc.org/github.com/magiconair/properties#Properties.Filter), [FilterRegexp()](http://godoc.org/github.com/magiconair/properties#Properties.FilterRegexp) and [FilterPrefix()](http://godoc.org/github.com/magiconair/properties#Properties.FilterPrefix) to get a subset of the properties - -### [1.3.0](https://github.com/magiconair/properties/tags/v1.3.0) - 18 Mar 2014 - -* Added support for time.Duration -* Made MustXXX() failure beha[ior configurable (log.Fatal, panic](https://github.com/magiconair/properties/tags/vior configurable (log.Fatal, panic) - custom) -* Changed default of MustXXX() failure from panic to log.Fatal - -### [1.2.0](https://github.com/magiconair/properties/tags/v1.2.0) - 05 Mar 2014 - -* Added MustGet... functions -* Added support for int and uint with range checks on 32 bit platforms - -### [1.1.0](https://github.com/magiconair/properties/tags/v1.1.0) - 20 Jan 2014 - -* Renamed from goproperties to properties -* Added support for expansion of environment vars in - filenames and value expressions -* Fixed bug where value expressions were not at the - start of the string - -### [1.0.0](https://github.com/magiconair/properties/tags/v1.0.0) - 7 Jan 2014 - -* Initial release diff --git a/vendor/github.com/magiconair/properties/LICENSE b/vendor/github.com/magiconair/properties/LICENSE deleted file mode 100644 index 7eab43b6bf..0000000000 --- a/vendor/github.com/magiconair/properties/LICENSE +++ /dev/null @@ -1,25 +0,0 @@ -goproperties - properties file decoder for Go - -Copyright (c) 2013-2014 - Frank Schroeder - -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -1. Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. -2. Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. 
- -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR -ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/magiconair/properties/README.md b/vendor/github.com/magiconair/properties/README.md deleted file mode 100644 index 5985911836..0000000000 --- a/vendor/github.com/magiconair/properties/README.md +++ /dev/null @@ -1,81 +0,0 @@ -Overview [![Build Status](https://travis-ci.org/magiconair/properties.svg?branch=master)](https://travis-ci.org/magiconair/properties) -======== - -#### Current version: 1.7.1 - -properties is a Go library for reading and writing properties files. - -It supports reading from multiple files or URLs and Spring style recursive -property expansion of expressions like `${key}` to their corresponding value. -Value expressions can refer to other keys like in `${key}` or to environment -variables like in `${USER}`. Filenames can also contain environment variables -like in `/home/${USER}/myapp.properties`. - -Properties can be decoded into structs, maps, arrays and values through -struct tags. - -Comments and the order of keys are preserved. Comments can be modified -and can be written to the output. - -The properties library supports both ISO-8859-1 and UTF-8 encoded data. - -Starting from version 1.3.0 the behavior of the MustXXX() functions is -configurable by providing a custom `ErrorHandler` function. The default has -changed from `panic` to `log.Fatal` but this is configurable and custom -error handling functions can be provided. See the package documentation for -details. - -Getting Started ---------------- - -```go -import ( - "flag" - "github.com/magiconair/properties" -) - -func main() { - p := properties.MustLoadFile("${HOME}/config.properties", properties.UTF8) - - // via getters - host := p.MustGetString("host") - port := p.GetInt("port", 8080) - - // or via decode - type Config struct { - Host string `properties:"host"` - Port int `properties:"port,default=9000"` - Accept []string `properties:"accept,default=image/png;image;gif"` - Timeout time.Duration `properties:"timeout,default=5s"` - } - var cfg Config - if err := p.Decode(&cfg); err != nil { - log.Fatal(err) - } - - // or via flags - p.MustFlag(flag.CommandLine) - - // or via url - p = properties.MustLoadURL("http://host/path") -} - -``` - -Read the full documentation on [GoDoc](https://godoc.org/github.com/magiconair/properties) [![GoDoc](https://godoc.org/github.com/magiconair/properties?status.png)](https://godoc.org/github.com/magiconair/properties) - -Installation and Upgrade ------------------------- - -``` -$ go get -u github.com/magiconair/properties -``` - -License -------- - -2 clause BSD license. See [LICENSE](https://github.com/magiconair/properties/blob/master/LICENSE) file for details. 
- -ToDo ----- -* Dump contents with passwords and secrets obscured diff --git a/vendor/github.com/magiconair/properties/decode.go b/vendor/github.com/magiconair/properties/decode.go deleted file mode 100644 index 0a961bb044..0000000000 --- a/vendor/github.com/magiconair/properties/decode.go +++ /dev/null @@ -1,289 +0,0 @@ -// Copyright 2017 Frank Schroeder. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package properties - -import ( - "fmt" - "reflect" - "strconv" - "strings" - "time" -) - -// Decode assigns property values to exported fields of a struct. -// -// Decode traverses v recursively and returns an error if a value cannot be -// converted to the field type or a required value is missing for a field. -// -// The following type dependent decodings are used: -// -// String, boolean, numeric fields have the value of the property key assigned. -// The property key name is the name of the field. A different key and a default -// value can be set in the field's tag. Fields without default value are -// required. If the value cannot be converted to the field type an error is -// returned. -// -// time.Duration fields have the result of time.ParseDuration() assigned. -// -// time.Time fields have the vaule of time.Parse() assigned. The default layout -// is time.RFC3339 but can be set in the field's tag. -// -// Arrays and slices of string, boolean, numeric, time.Duration and time.Time -// fields have the value interpreted as a comma separated list of values. The -// individual values are trimmed of whitespace and empty values are ignored. A -// default value can be provided as a semicolon separated list in the field's -// tag. -// -// Struct fields are decoded recursively using the field name plus "." as -// prefix. The prefix (without dot) can be overridden in the field's tag. -// Default values are not supported in the field's tag. Specify them on the -// fields of the inner struct instead. -// -// Map fields must have a key of type string and are decoded recursively by -// using the field's name plus ".' as prefix and the next element of the key -// name as map key. The prefix (without dot) can be overridden in the field's -// tag. Default values are not supported. -// -// Examples: -// -// // Field is ignored. -// Field int `properties:"-"` -// -// // Field is assigned value of 'Field'. -// Field int -// -// // Field is assigned value of 'myName'. -// Field int `properties:"myName"` -// -// // Field is assigned value of key 'myName' and has a default -// // value 15 if the key does not exist. -// Field int `properties:"myName,default=15"` -// -// // Field is assigned value of key 'Field' and has a default -// // value 15 if the key does not exist. -// Field int `properties:",default=15"` -// -// // Field is assigned value of key 'date' and the date -// // is in format 2006-01-02 -// Field time.Time `properties:"date,layout=2006-01-02"` -// -// // Field is assigned the non-empty and whitespace trimmed -// // values of key 'Field' split by commas. -// Field []string -// -// // Field is assigned the non-empty and whitespace trimmed -// // values of key 'Field' split by commas and has a default -// // value ["a", "b", "c"] if the key does not exist. -// Field []string `properties:",default=a;b;c"` -// -// // Field is decoded recursively with "Field." as key prefix. -// Field SomeStruct -// -// // Field is decoded recursively with "myName." as key prefix. 
-// Field SomeStruct `properties:"myName"` -// -// // Field is decoded recursively with "Field." as key prefix -// // and the next dotted element of the key as map key. -// Field map[string]string -// -// // Field is decoded recursively with "myName." as key prefix -// // and the next dotted element of the key as map key. -// Field map[string]string `properties:"myName"` -func (p *Properties) Decode(x interface{}) error { - t, v := reflect.TypeOf(x), reflect.ValueOf(x) - if t.Kind() != reflect.Ptr || v.Elem().Type().Kind() != reflect.Struct { - return fmt.Errorf("not a pointer to struct: %s", t) - } - if err := dec(p, "", nil, nil, v); err != nil { - return err - } - return nil -} - -func dec(p *Properties, key string, def *string, opts map[string]string, v reflect.Value) error { - t := v.Type() - - // value returns the property value for key or the default if provided. - value := func() (string, error) { - if val, ok := p.Get(key); ok { - return val, nil - } - if def != nil { - return *def, nil - } - return "", fmt.Errorf("missing required key %s", key) - } - - // conv converts a string to a value of the given type. - conv := func(s string, t reflect.Type) (val reflect.Value, err error) { - var v interface{} - - switch { - case isDuration(t): - v, err = time.ParseDuration(s) - - case isTime(t): - layout := opts["layout"] - if layout == "" { - layout = time.RFC3339 - } - v, err = time.Parse(layout, s) - - case isBool(t): - v, err = boolVal(s), nil - - case isString(t): - v, err = s, nil - - case isFloat(t): - v, err = strconv.ParseFloat(s, 64) - - case isInt(t): - v, err = strconv.ParseInt(s, 10, 64) - - case isUint(t): - v, err = strconv.ParseUint(s, 10, 64) - - default: - return reflect.Zero(t), fmt.Errorf("unsupported type %s", t) - } - if err != nil { - return reflect.Zero(t), err - } - return reflect.ValueOf(v).Convert(t), nil - } - - // keydef returns the property key and the default value based on the - // name of the struct field and the options in the tag. - keydef := func(f reflect.StructField) (string, *string, map[string]string) { - _key, _opts := parseTag(f.Tag.Get("properties")) - - var _def *string - if d, ok := _opts["default"]; ok { - _def = &d - } - if _key != "" { - return _key, _def, _opts - } - return f.Name, _def, _opts - } - - switch { - case isDuration(t) || isTime(t) || isBool(t) || isString(t) || isFloat(t) || isInt(t) || isUint(t): - s, err := value() - if err != nil { - return err - } - val, err := conv(s, t) - if err != nil { - return err - } - v.Set(val) - - case isPtr(t): - return dec(p, key, def, opts, v.Elem()) - - case isStruct(t): - for i := 0; i < v.NumField(); i++ { - fv := v.Field(i) - fk, def, opts := keydef(t.Field(i)) - if !fv.CanSet() { - return fmt.Errorf("cannot set %s", t.Field(i).Name) - } - if fk == "-" { - continue - } - if key != "" { - fk = key + "." 
+ fk - } - if err := dec(p, fk, def, opts, fv); err != nil { - return err - } - } - return nil - - case isArray(t): - val, err := value() - if err != nil { - return err - } - vals := split(val, ";") - a := reflect.MakeSlice(t, 0, len(vals)) - for _, s := range vals { - val, err := conv(s, t.Elem()) - if err != nil { - return err - } - a = reflect.Append(a, val) - } - v.Set(a) - - case isMap(t): - valT := t.Elem() - m := reflect.MakeMap(t) - for postfix := range p.FilterStripPrefix(key + ".").m { - pp := strings.SplitN(postfix, ".", 2) - mk, mv := pp[0], reflect.New(valT) - if err := dec(p, key+"."+mk, nil, nil, mv); err != nil { - return err - } - m.SetMapIndex(reflect.ValueOf(mk), mv.Elem()) - } - v.Set(m) - - default: - return fmt.Errorf("unsupported type %s", t) - } - return nil -} - -// split splits a string on sep, trims whitespace of elements -// and omits empty elements -func split(s string, sep string) []string { - var a []string - for _, v := range strings.Split(s, sep) { - if v = strings.TrimSpace(v); v != "" { - a = append(a, v) - } - } - return a -} - -// parseTag parses a "key,k=v,k=v,..." -func parseTag(tag string) (key string, opts map[string]string) { - opts = map[string]string{} - for i, s := range strings.Split(tag, ",") { - if i == 0 { - key = s - continue - } - - pp := strings.SplitN(s, "=", 2) - if len(pp) == 1 { - opts[pp[0]] = "" - } else { - opts[pp[0]] = pp[1] - } - } - return key, opts -} - -func isArray(t reflect.Type) bool { return t.Kind() == reflect.Array || t.Kind() == reflect.Slice } -func isBool(t reflect.Type) bool { return t.Kind() == reflect.Bool } -func isDuration(t reflect.Type) bool { return t == reflect.TypeOf(time.Second) } -func isMap(t reflect.Type) bool { return t.Kind() == reflect.Map } -func isPtr(t reflect.Type) bool { return t.Kind() == reflect.Ptr } -func isString(t reflect.Type) bool { return t.Kind() == reflect.String } -func isStruct(t reflect.Type) bool { return t.Kind() == reflect.Struct } -func isTime(t reflect.Type) bool { return t == reflect.TypeOf(time.Time{}) } -func isFloat(t reflect.Type) bool { - return t.Kind() == reflect.Float32 || t.Kind() == reflect.Float64 -} -func isInt(t reflect.Type) bool { - return t.Kind() == reflect.Int || t.Kind() == reflect.Int8 || t.Kind() == reflect.Int16 || t.Kind() == reflect.Int32 || t.Kind() == reflect.Int64 -} -func isUint(t reflect.Type) bool { - return t.Kind() == reflect.Uint || t.Kind() == reflect.Uint8 || t.Kind() == reflect.Uint16 || t.Kind() == reflect.Uint32 || t.Kind() == reflect.Uint64 -} diff --git a/vendor/github.com/magiconair/properties/doc.go b/vendor/github.com/magiconair/properties/doc.go deleted file mode 100644 index 36c8368089..0000000000 --- a/vendor/github.com/magiconair/properties/doc.go +++ /dev/null @@ -1,156 +0,0 @@ -// Copyright 2017 Frank Schroeder. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package properties provides functions for reading and writing -// ISO-8859-1 and UTF-8 encoded .properties files and has -// support for recursive property expansion. -// -// Java properties files are ISO-8859-1 encoded and use Unicode -// literals for characters outside the ISO character set. Unicode -// literals can be used in UTF-8 encoded properties files but -// aren't necessary. 
-// -// To load a single properties file use MustLoadFile(): -// -// p := properties.MustLoadFile(filename, properties.UTF8) -// -// To load multiple properties files use MustLoadFiles() -// which loads the files in the given order and merges the -// result. Missing properties files can be ignored if the -// 'ignoreMissing' flag is set to true. -// -// Filenames can contain environment variables which are expanded -// before loading. -// -// f1 := "/etc/myapp/myapp.conf" -// f2 := "/home/${USER}/myapp.conf" -// p := MustLoadFiles([]string{f1, f2}, properties.UTF8, true) -// -// All of the different key/value delimiters ' ', ':' and '=' are -// supported as well as the comment characters '!' and '#' and -// multi-line values. -// -// ! this is a comment -// # and so is this -// -// # the following expressions are equal -// key value -// key=value -// key:value -// key = value -// key : value -// key = val\ -// ue -// -// Properties stores all comments preceding a key and provides -// GetComments() and SetComments() methods to retrieve and -// update them. The convenience functions GetComment() and -// SetComment() allow access to the last comment. The -// WriteComment() method writes properties files including -// the comments and with the keys in the original order. -// This can be used for sanitizing properties files. -// -// Property expansion is recursive and circular references -// and malformed expressions are not allowed and cause an -// error. Expansion of environment variables is supported. -// -// # standard property -// key = value -// -// # property expansion: key2 = value -// key2 = ${key} -// -// # recursive expansion: key3 = value -// key3 = ${key2} -// -// # circular reference (error) -// key = ${key} -// -// # malformed expression (error) -// key = ${ke -// -// # refers to the users' home dir -// home = ${HOME} -// -// # local key takes precendence over env var: u = foo -// USER = foo -// u = ${USER} -// -// The default property expansion format is ${key} but can be -// changed by setting different pre- and postfix values on the -// Properties object. -// -// p := properties.NewProperties() -// p.Prefix = "#[" -// p.Postfix = "]#" -// -// Properties provides convenience functions for getting typed -// values with default values if the key does not exist or the -// type conversion failed. -// -// # Returns true if the value is either "1", "on", "yes" or "true" -// # Returns false for every other value and the default value if -// # the key does not exist. -// v = p.GetBool("key", false) -// -// # Returns the value if the key exists and the format conversion -// # was successful. Otherwise, the default value is returned. -// v = p.GetInt64("key", 999) -// v = p.GetUint64("key", 999) -// v = p.GetFloat64("key", 123.0) -// v = p.GetString("key", "def") -// v = p.GetDuration("key", 999) -// -// As an alterantive properties may be applied with the standard -// library's flag implementation at any time. -// -// # Standard configuration -// v = flag.Int("key", 999, "help message") -// flag.Parse() -// -// # Merge p into the flag set -// p.MustFlag(flag.CommandLine) -// -// Properties provides several MustXXX() convenience functions -// which will terminate the app if an error occurs. The behavior -// of the failure is configurable and the default is to call -// log.Fatal(err). To have the MustXXX() functions panic instead -// of logging the error set a different ErrorHandler before -// you use the Properties package. 
-// -// properties.ErrorHandler = properties.PanicHandler -// -// # Will panic instead of logging an error -// p := properties.MustLoadFile("config.properties") -// -// You can also provide your own ErrorHandler function. The only requirement -// is that the error handler function must exit after handling the error. -// -// properties.ErrorHandler = func(err error) { -// fmt.Println(err) -// os.Exit(1) -// } -// -// # Will write to stdout and then exit -// p := properties.MustLoadFile("config.properties") -// -// Properties can also be loaded into a struct via the `Decode` -// method, e.g. -// -// type S struct { -// A string `properties:"a,default=foo"` -// D time.Duration `properties:"timeout,default=5s"` -// E time.Time `properties:"expires,layout=2006-01-02,default=2015-01-01"` -// } -// -// See `Decode()` method for the full documentation. -// -// The following documents provide a description of the properties -// file format. -// -// http://en.wikipedia.org/wiki/.properties -// -// http://docs.oracle.com/javase/7/docs/api/java/util/Properties.html#load%28java.io.Reader%29 -// -package properties diff --git a/vendor/github.com/magiconair/properties/integrate.go b/vendor/github.com/magiconair/properties/integrate.go deleted file mode 100644 index 0d775e0350..0000000000 --- a/vendor/github.com/magiconair/properties/integrate.go +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright 2017 Frank Schroeder. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package properties - -import "flag" - -// MustFlag sets flags that are skipped by dst.Parse when p contains -// the respective key for flag.Flag.Name. -// -// It's use is recommended with command line arguments as in: -// flag.Parse() -// p.MustFlag(flag.CommandLine) -func (p *Properties) MustFlag(dst *flag.FlagSet) { - m := make(map[string]*flag.Flag) - dst.VisitAll(func(f *flag.Flag) { - m[f.Name] = f - }) - dst.Visit(func(f *flag.Flag) { - delete(m, f.Name) // overridden - }) - - for name, f := range m { - v, ok := p.Get(name) - if !ok { - continue - } - - if err := f.Value.Set(v); err != nil { - ErrorHandler(err) - } - } -} diff --git a/vendor/github.com/magiconair/properties/lex.go b/vendor/github.com/magiconair/properties/lex.go deleted file mode 100644 index a3cba0319d..0000000000 --- a/vendor/github.com/magiconair/properties/lex.go +++ /dev/null @@ -1,408 +0,0 @@ -// Copyright 2017 Frank Schroeder. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. -// -// Parts of the lexer are from the template/text/parser package -// For these parts the following applies: -// -// Copyright 2011 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file of the go 1.2 -// distribution. - -package properties - -import ( - "fmt" - "strconv" - "strings" - "unicode/utf8" -) - -// item represents a token or text string returned from the scanner. -type item struct { - typ itemType // The type of this item. - pos int // The starting position, in bytes, of this item in the input string. - val string // The value of this item. -} - -func (i item) String() string { - switch { - case i.typ == itemEOF: - return "EOF" - case i.typ == itemError: - return i.val - case len(i.val) > 10: - return fmt.Sprintf("%.10q...", i.val) - } - return fmt.Sprintf("%q", i.val) -} - -// itemType identifies the type of lex items. 
-type itemType int - -const ( - itemError itemType = iota // error occurred; value is text of error - itemEOF - itemKey // a key - itemValue // a value - itemComment // a comment -) - -// defines a constant for EOF -const eof = -1 - -// permitted whitespace characters space, FF and TAB -const whitespace = " \f\t" - -// stateFn represents the state of the scanner as a function that returns the next state. -type stateFn func(*lexer) stateFn - -// lexer holds the state of the scanner. -type lexer struct { - input string // the string being scanned - state stateFn // the next lexing function to enter - pos int // current position in the input - start int // start position of this item - width int // width of last rune read from input - lastPos int // position of most recent item returned by nextItem - runes []rune // scanned runes for this item - items chan item // channel of scanned items -} - -// next returns the next rune in the input. -func (l *lexer) next() rune { - if l.pos >= len(l.input) { - l.width = 0 - return eof - } - r, w := utf8.DecodeRuneInString(l.input[l.pos:]) - l.width = w - l.pos += l.width - return r -} - -// peek returns but does not consume the next rune in the input. -func (l *lexer) peek() rune { - r := l.next() - l.backup() - return r -} - -// backup steps back one rune. Can only be called once per call of next. -func (l *lexer) backup() { - l.pos -= l.width -} - -// emit passes an item back to the client. -func (l *lexer) emit(t itemType) { - i := item{t, l.start, string(l.runes)} - l.items <- i - l.start = l.pos - l.runes = l.runes[:0] -} - -// ignore skips over the pending input before this point. -func (l *lexer) ignore() { - l.start = l.pos -} - -// appends the rune to the current value -func (l *lexer) appendRune(r rune) { - l.runes = append(l.runes, r) -} - -// accept consumes the next rune if it's from the valid set. -func (l *lexer) accept(valid string) bool { - if strings.ContainsRune(valid, l.next()) { - return true - } - l.backup() - return false -} - -// acceptRun consumes a run of runes from the valid set. -func (l *lexer) acceptRun(valid string) { - for strings.ContainsRune(valid, l.next()) { - } - l.backup() -} - -// acceptRunUntil consumes a run of runes up to a terminator. -func (l *lexer) acceptRunUntil(term rune) { - for term != l.next() { - } - l.backup() -} - -// hasText returns true if the current parsed text is not empty. -func (l *lexer) isNotEmpty() bool { - return l.pos > l.start -} - -// lineNumber reports which line we're on, based on the position of -// the previous item returned by nextItem. Doing it this way -// means we don't have to worry about peek double counting. -func (l *lexer) lineNumber() int { - return 1 + strings.Count(l.input[:l.lastPos], "\n") -} - -// errorf returns an error token and terminates the scan by passing -// back a nil pointer that will be the next state, terminating l.nextItem. -func (l *lexer) errorf(format string, args ...interface{}) stateFn { - l.items <- item{itemError, l.start, fmt.Sprintf(format, args...)} - return nil -} - -// nextItem returns the next item from the input. -func (l *lexer) nextItem() item { - i := <-l.items - l.lastPos = i.pos - return i -} - -// lex creates a new scanner for the input string. -func lex(input string) *lexer { - l := &lexer{ - input: input, - items: make(chan item), - runes: make([]rune, 0, 32), - } - go l.run() - return l -} - -// run runs the state machine for the lexer. 
-func (l *lexer) run() { - for l.state = lexBeforeKey(l); l.state != nil; { - l.state = l.state(l) - } -} - -// state functions - -// lexBeforeKey scans until a key begins. -func lexBeforeKey(l *lexer) stateFn { - switch r := l.next(); { - case isEOF(r): - l.emit(itemEOF) - return nil - - case isEOL(r): - l.ignore() - return lexBeforeKey - - case isComment(r): - return lexComment - - case isWhitespace(r): - l.acceptRun(whitespace) - l.ignore() - return lexKey - - default: - l.backup() - return lexKey - } -} - -// lexComment scans a comment line. The comment character has already been scanned. -func lexComment(l *lexer) stateFn { - l.acceptRun(whitespace) - l.ignore() - for { - switch r := l.next(); { - case isEOF(r): - l.ignore() - l.emit(itemEOF) - return nil - case isEOL(r): - l.emit(itemComment) - return lexBeforeKey - default: - l.appendRune(r) - } - } -} - -// lexKey scans the key up to a delimiter -func lexKey(l *lexer) stateFn { - var r rune - -Loop: - for { - switch r = l.next(); { - - case isEscape(r): - err := l.scanEscapeSequence() - if err != nil { - return l.errorf(err.Error()) - } - - case isEndOfKey(r): - l.backup() - break Loop - - case isEOF(r): - break Loop - - default: - l.appendRune(r) - } - } - - if len(l.runes) > 0 { - l.emit(itemKey) - } - - if isEOF(r) { - l.emit(itemEOF) - return nil - } - - return lexBeforeValue -} - -// lexBeforeValue scans the delimiter between key and value. -// Leading and trailing whitespace is ignored. -// We expect to be just after the key. -func lexBeforeValue(l *lexer) stateFn { - l.acceptRun(whitespace) - l.accept(":=") - l.acceptRun(whitespace) - l.ignore() - return lexValue -} - -// lexValue scans text until the end of the line. We expect to be just after the delimiter. -func lexValue(l *lexer) stateFn { - for { - switch r := l.next(); { - case isEscape(r): - if isEOL(l.peek()) { - l.next() - l.acceptRun(whitespace) - } else { - err := l.scanEscapeSequence() - if err != nil { - return l.errorf(err.Error()) - } - } - - case isEOL(r): - l.emit(itemValue) - l.ignore() - return lexBeforeKey - - case isEOF(r): - l.emit(itemValue) - l.emit(itemEOF) - return nil - - default: - l.appendRune(r) - } - } -} - -// scanEscapeSequence scans either one of the escaped characters -// or a unicode literal. We expect to be after the escape character. -func (l *lexer) scanEscapeSequence() error { - switch r := l.next(); { - - case isEscapedCharacter(r): - l.appendRune(decodeEscapedCharacter(r)) - return nil - - case atUnicodeLiteral(r): - return l.scanUnicodeLiteral() - - case isEOF(r): - return fmt.Errorf("premature EOF") - - // silently drop the escape character and append the rune as is - default: - l.appendRune(r) - return nil - } -} - -// scans a unicode literal in the form \uXXXX. We expect to be after the \u. -func (l *lexer) scanUnicodeLiteral() error { - // scan the digits - d := make([]rune, 4) - for i := 0; i < 4; i++ { - d[i] = l.next() - if d[i] == eof || !strings.ContainsRune("0123456789abcdefABCDEF", d[i]) { - return fmt.Errorf("invalid unicode literal") - } - } - - // decode the digits into a rune - r, err := strconv.ParseInt(string(d), 16, 0) - if err != nil { - return err - } - - l.appendRune(rune(r)) - return nil -} - -// decodeEscapedCharacter returns the unescaped rune. We expect to be after the escape character. 
-func decodeEscapedCharacter(r rune) rune { - switch r { - case 'f': - return '\f' - case 'n': - return '\n' - case 'r': - return '\r' - case 't': - return '\t' - default: - return r - } -} - -// atUnicodeLiteral reports whether we are at a unicode literal. -// The escape character has already been consumed. -func atUnicodeLiteral(r rune) bool { - return r == 'u' -} - -// isComment reports whether we are at the start of a comment. -func isComment(r rune) bool { - return r == '#' || r == '!' -} - -// isEndOfKey reports whether the rune terminates the current key. -func isEndOfKey(r rune) bool { - return strings.ContainsRune(" \f\t\r\n:=", r) -} - -// isEOF reports whether we are at EOF. -func isEOF(r rune) bool { - return r == eof -} - -// isEOL reports whether we are at a new line character. -func isEOL(r rune) bool { - return r == '\n' || r == '\r' -} - -// isEscape reports whether the rune is the escape character which -// prefixes unicode literals and other escaped characters. -func isEscape(r rune) bool { - return r == '\\' -} - -// isEscapedCharacter reports whether we are at one of the characters that need escaping. -// The escape character has already been consumed. -func isEscapedCharacter(r rune) bool { - return strings.ContainsRune(" :=fnrt", r) -} - -// isWhitespace reports whether the rune is a whitespace character. -func isWhitespace(r rune) bool { - return strings.ContainsRune(whitespace, r) -} diff --git a/vendor/github.com/magiconair/properties/load.go b/vendor/github.com/magiconair/properties/load.go deleted file mode 100644 index 701a86d4c2..0000000000 --- a/vendor/github.com/magiconair/properties/load.go +++ /dev/null @@ -1,232 +0,0 @@ -// Copyright 2017 Frank Schroeder. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package properties - -import ( - "fmt" - "io/ioutil" - "net/http" - "os" - "strings" -) - -// Encoding specifies encoding of the input data. -type Encoding uint - -const ( - // UTF8 interprets the input data as UTF-8. - UTF8 Encoding = 1 << iota - - // ISO_8859_1 interprets the input data as ISO-8859-1. - ISO_8859_1 -) - -// Load reads a buffer into a Properties struct. -func Load(buf []byte, enc Encoding) (*Properties, error) { - return loadBuf(buf, enc) -} - -// LoadString reads an UTF8 string into a properties struct. -func LoadString(s string) (*Properties, error) { - return loadBuf([]byte(s), UTF8) -} - -// LoadFile reads a file into a Properties struct. -func LoadFile(filename string, enc Encoding) (*Properties, error) { - return loadAll([]string{filename}, enc, false) -} - -// LoadFiles reads multiple files in the given order into -// a Properties struct. If 'ignoreMissing' is true then -// non-existent files will not be reported as error. -func LoadFiles(filenames []string, enc Encoding, ignoreMissing bool) (*Properties, error) { - return loadAll(filenames, enc, ignoreMissing) -} - -// LoadURL reads the content of the URL into a Properties struct. -// -// The encoding is determined via the Content-Type header which -// should be set to 'text/plain'. If the 'charset' parameter is -// missing, 'iso-8859-1' or 'latin1' the encoding is set to -// ISO-8859-1. If the 'charset' parameter is set to 'utf-8' the -// encoding is set to UTF-8. A missing content type header is -// interpreted as 'text/plain; charset=utf-8'. 
-func LoadURL(url string) (*Properties, error) { - return loadAll([]string{url}, UTF8, false) -} - -// LoadURLs reads the content of multiple URLs in the given order into a -// Properties struct. If 'ignoreMissing' is true then a 404 status code will -// not be reported as error. See LoadURL for the Content-Type header -// and the encoding. -func LoadURLs(urls []string, ignoreMissing bool) (*Properties, error) { - return loadAll(urls, UTF8, ignoreMissing) -} - -// LoadAll reads the content of multiple URLs or files in the given order into a -// Properties struct. If 'ignoreMissing' is true then a 404 status code or missing file will -// not be reported as error. Encoding sets the encoding for files. For the URLs please see -// LoadURL for the Content-Type header and the encoding. -func LoadAll(names []string, enc Encoding, ignoreMissing bool) (*Properties, error) { - return loadAll(names, enc, ignoreMissing) -} - -// MustLoadString reads an UTF8 string into a Properties struct and -// panics on error. -func MustLoadString(s string) *Properties { - return must(LoadString(s)) -} - -// MustLoadFile reads a file into a Properties struct and -// panics on error. -func MustLoadFile(filename string, enc Encoding) *Properties { - return must(LoadFile(filename, enc)) -} - -// MustLoadFiles reads multiple files in the given order into -// a Properties struct and panics on error. If 'ignoreMissing' -// is true then non-existent files will not be reported as error. -func MustLoadFiles(filenames []string, enc Encoding, ignoreMissing bool) *Properties { - return must(LoadFiles(filenames, enc, ignoreMissing)) -} - -// MustLoadURL reads the content of a URL into a Properties struct and -// panics on error. -func MustLoadURL(url string) *Properties { - return must(LoadURL(url)) -} - -// MustLoadURLs reads the content of multiple URLs in the given order into a -// Properties struct and panics on error. If 'ignoreMissing' is true then a 404 -// status code will not be reported as error. -func MustLoadURLs(urls []string, ignoreMissing bool) *Properties { - return must(LoadURLs(urls, ignoreMissing)) -} - -// MustLoadAll reads the content of multiple URLs or files in the given order into a -// Properties struct. If 'ignoreMissing' is true then a 404 status code or missing file will -// not be reported as error. Encoding sets the encoding for files. For the URLs please see -// LoadURL for the Content-Type header and the encoding. It panics on error. -func MustLoadAll(names []string, enc Encoding, ignoreMissing bool) *Properties { - return must(LoadAll(names, enc, ignoreMissing)) -} - -func loadBuf(buf []byte, enc Encoding) (*Properties, error) { - p, err := parse(convert(buf, enc)) - if err != nil { - return nil, err - } - return p, p.check() -} - -func loadAll(names []string, enc Encoding, ignoreMissing bool) (*Properties, error) { - result := NewProperties() - for _, name := range names { - n, err := expandName(name) - if err != nil { - return nil, err - } - var p *Properties - if strings.HasPrefix(n, "http://") || strings.HasPrefix(n, "https://") { - p, err = loadURL(n, ignoreMissing) - } else { - p, err = loadFile(n, enc, ignoreMissing) - } - if err != nil { - return nil, err - } - result.Merge(p) - - } - return result, result.check() -} - -func loadFile(filename string, enc Encoding, ignoreMissing bool) (*Properties, error) { - data, err := ioutil.ReadFile(filename) - if err != nil { - if ignoreMissing && os.IsNotExist(err) { - LogPrintf("properties: %s not found. 
skipping", filename) - return NewProperties(), nil - } - return nil, err - } - p, err := parse(convert(data, enc)) - if err != nil { - return nil, err - } - return p, nil -} - -func loadURL(url string, ignoreMissing bool) (*Properties, error) { - resp, err := http.Get(url) - if err != nil { - return nil, fmt.Errorf("properties: error fetching %q. %s", url, err) - } - if resp.StatusCode == 404 && ignoreMissing { - LogPrintf("properties: %s returned %d. skipping", url, resp.StatusCode) - return NewProperties(), nil - } - if resp.StatusCode != 200 { - return nil, fmt.Errorf("properties: %s returned %d", url, resp.StatusCode) - } - body, err := ioutil.ReadAll(resp.Body) - if err != nil { - return nil, fmt.Errorf("properties: %s error reading response. %s", url, err) - } - if err = resp.Body.Close(); err != nil { - return nil, fmt.Errorf("properties: %s error reading response. %s", url, err) - } - - ct := resp.Header.Get("Content-Type") - var enc Encoding - switch strings.ToLower(ct) { - case "text/plain", "text/plain; charset=iso-8859-1", "text/plain; charset=latin1": - enc = ISO_8859_1 - case "", "text/plain; charset=utf-8": - enc = UTF8 - default: - return nil, fmt.Errorf("properties: invalid content type %s", ct) - } - - p, err := parse(convert(body, enc)) - if err != nil { - return nil, err - } - return p, nil -} - -func must(p *Properties, err error) *Properties { - if err != nil { - ErrorHandler(err) - } - return p -} - -// expandName expands ${ENV_VAR} expressions in a name. -// If the environment variable does not exist then it will be replaced -// with an empty string. Malformed expressions like "${ENV_VAR" will -// be reported as error. -func expandName(name string) (string, error) { - return expand(name, make(map[string]bool), "${", "}", make(map[string]string)) -} - -// Interprets a byte buffer either as an ISO-8859-1 or UTF-8 encoded string. -// For ISO-8859-1 we can convert each byte straight into a rune since the -// first 256 unicode code points cover ISO-8859-1. -func convert(buf []byte, enc Encoding) string { - switch enc { - case UTF8: - return string(buf) - case ISO_8859_1: - runes := make([]rune, len(buf)) - for i, b := range buf { - runes[i] = rune(b) - } - return string(runes) - default: - ErrorHandler(fmt.Errorf("unsupported encoding %v", enc)) - } - panic("ErrorHandler should exit") -} diff --git a/vendor/github.com/magiconair/properties/parser.go b/vendor/github.com/magiconair/properties/parser.go deleted file mode 100644 index 90f555cb93..0000000000 --- a/vendor/github.com/magiconair/properties/parser.go +++ /dev/null @@ -1,95 +0,0 @@ -// Copyright 2017 Frank Schroeder. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package properties - -import ( - "fmt" - "runtime" -) - -type parser struct { - lex *lexer -} - -func parse(input string) (properties *Properties, err error) { - p := &parser{lex: lex(input)} - defer p.recover(&err) - - properties = NewProperties() - key := "" - comments := []string{} - - for { - token := p.expectOneOf(itemComment, itemKey, itemEOF) - switch token.typ { - case itemEOF: - goto done - case itemComment: - comments = append(comments, token.val) - continue - case itemKey: - key = token.val - if _, ok := properties.m[key]; !ok { - properties.k = append(properties.k, key) - } - } - - token = p.expectOneOf(itemValue, itemEOF) - if len(comments) > 0 { - properties.c[key] = comments - comments = []string{} - } - switch token.typ { - case itemEOF: - properties.m[key] = "" - goto done - case itemValue: - properties.m[key] = token.val - } - } - -done: - return properties, nil -} - -func (p *parser) errorf(format string, args ...interface{}) { - format = fmt.Sprintf("properties: Line %d: %s", p.lex.lineNumber(), format) - panic(fmt.Errorf(format, args...)) -} - -func (p *parser) expect(expected itemType) (token item) { - token = p.lex.nextItem() - if token.typ != expected { - p.unexpected(token) - } - return token -} - -func (p *parser) expectOneOf(expected ...itemType) (token item) { - token = p.lex.nextItem() - for _, v := range expected { - if token.typ == v { - return token - } - } - p.unexpected(token) - panic("unexpected token") -} - -func (p *parser) unexpected(token item) { - p.errorf(token.String()) -} - -// recover is the handler that turns panics into returns from the top level of Parse. -func (p *parser) recover(errp *error) { - e := recover() - if e != nil { - if _, ok := e.(runtime.Error); ok { - panic(e) - } - *errp = e.(error) - } - return -} diff --git a/vendor/github.com/magiconair/properties/properties.go b/vendor/github.com/magiconair/properties/properties.go deleted file mode 100644 index 80360c96ae..0000000000 --- a/vendor/github.com/magiconair/properties/properties.go +++ /dev/null @@ -1,777 +0,0 @@ -// Copyright 2017 Frank Schroeder. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package properties - -// BUG(frank): Set() does not check for invalid unicode literals since this is currently handled by the lexer. -// BUG(frank): Write() does not allow to configure the newline character. Therefore, on Windows LF is used. - -import ( - "fmt" - "io" - "log" - "os" - "regexp" - "strconv" - "strings" - "time" - "unicode/utf8" -) - -// ErrorHandlerFunc defines the type of function which handles failures -// of the MustXXX() functions. An error handler function must exit -// the application after handling the error. -type ErrorHandlerFunc func(error) - -// ErrorHandler is the function which handles failures of the MustXXX() -// functions. The default is LogFatalHandler. -var ErrorHandler ErrorHandlerFunc = LogFatalHandler - -// LogHandlerFunc defines the function prototype for logging errors. -type LogHandlerFunc func(fmt string, args ...interface{}) - -// LogPrintf defines a log handler which uses log.Printf. -var LogPrintf LogHandlerFunc = log.Printf - -// LogFatalHandler handles the error by logging a fatal error and exiting. -func LogFatalHandler(err error) { - log.Fatal(err) -} - -// PanicHandler handles the error by panicking. 
-func PanicHandler(err error) { - panic(err) -} - -// ----------------------------------------------------------------------------- - -// A Properties contains the key/value pairs from the properties input. -// All values are stored in unexpanded form and are expanded at runtime -type Properties struct { - // Pre-/Postfix for property expansion. - Prefix string - Postfix string - - // DisableExpansion controls the expansion of properties on Get() - // and the check for circular references on Set(). When set to - // true Properties behaves like a simple key/value store and does - // not check for circular references on Get() or on Set(). - DisableExpansion bool - - // Stores the key/value pairs - m map[string]string - - // Stores the comments per key. - c map[string][]string - - // Stores the keys in order of appearance. - k []string -} - -// NewProperties creates a new Properties struct with the default -// configuration for "${key}" expressions. -func NewProperties() *Properties { - return &Properties{ - Prefix: "${", - Postfix: "}", - m: map[string]string{}, - c: map[string][]string{}, - k: []string{}, - } -} - -// Get returns the expanded value for the given key if exists. -// Otherwise, ok is false. -func (p *Properties) Get(key string) (value string, ok bool) { - v, ok := p.m[key] - if p.DisableExpansion { - return v, ok - } - if !ok { - return "", false - } - - expanded, err := p.expand(v) - - // we guarantee that the expanded value is free of - // circular references and malformed expressions - // so we panic if we still get an error here. - if err != nil { - ErrorHandler(fmt.Errorf("%s in %q", err, key+" = "+v)) - } - - return expanded, true -} - -// MustGet returns the expanded value for the given key if exists. -// Otherwise, it panics. -func (p *Properties) MustGet(key string) string { - if v, ok := p.Get(key); ok { - return v - } - ErrorHandler(invalidKeyError(key)) - panic("ErrorHandler should exit") -} - -// ---------------------------------------------------------------------------- - -// ClearComments removes the comments for all keys. -func (p *Properties) ClearComments() { - p.c = map[string][]string{} -} - -// ---------------------------------------------------------------------------- - -// GetComment returns the last comment before the given key or an empty string. -func (p *Properties) GetComment(key string) string { - comments, ok := p.c[key] - if !ok || len(comments) == 0 { - return "" - } - return comments[len(comments)-1] -} - -// ---------------------------------------------------------------------------- - -// GetComments returns all comments that appeared before the given key or nil. -func (p *Properties) GetComments(key string) []string { - if comments, ok := p.c[key]; ok { - return comments - } - return nil -} - -// ---------------------------------------------------------------------------- - -// SetComment sets the comment for the key. -func (p *Properties) SetComment(key, comment string) { - p.c[key] = []string{comment} -} - -// ---------------------------------------------------------------------------- - -// SetComments sets the comments for the key. If the comments are nil then -// all comments for this key are deleted. -func (p *Properties) SetComments(key string, comments []string) { - if comments == nil { - delete(p.c, key) - return - } - p.c[key] = comments -} - -// ---------------------------------------------------------------------------- - -// GetBool checks if the expanded value is one of '1', 'yes', -// 'true' or 'on' if the key exists. 
The comparison is case-insensitive. -// If the key does not exist the default value is returned. -func (p *Properties) GetBool(key string, def bool) bool { - v, err := p.getBool(key) - if err != nil { - return def - } - return v -} - -// MustGetBool checks if the expanded value is one of '1', 'yes', -// 'true' or 'on' if the key exists. The comparison is case-insensitive. -// If the key does not exist the function panics. -func (p *Properties) MustGetBool(key string) bool { - v, err := p.getBool(key) - if err != nil { - ErrorHandler(err) - } - return v -} - -func (p *Properties) getBool(key string) (value bool, err error) { - if v, ok := p.Get(key); ok { - return boolVal(v), nil - } - return false, invalidKeyError(key) -} - -func boolVal(v string) bool { - v = strings.ToLower(v) - return v == "1" || v == "true" || v == "yes" || v == "on" -} - -// ---------------------------------------------------------------------------- - -// GetDuration parses the expanded value as an time.Duration (in ns) if the -// key exists. If key does not exist or the value cannot be parsed the default -// value is returned. In almost all cases you want to use GetParsedDuration(). -func (p *Properties) GetDuration(key string, def time.Duration) time.Duration { - v, err := p.getInt64(key) - if err != nil { - return def - } - return time.Duration(v) -} - -// MustGetDuration parses the expanded value as an time.Duration (in ns) if -// the key exists. If key does not exist or the value cannot be parsed the -// function panics. In almost all cases you want to use MustGetParsedDuration(). -func (p *Properties) MustGetDuration(key string) time.Duration { - v, err := p.getInt64(key) - if err != nil { - ErrorHandler(err) - } - return time.Duration(v) -} - -// ---------------------------------------------------------------------------- - -// GetParsedDuration parses the expanded value with time.ParseDuration() if the key exists. -// If key does not exist or the value cannot be parsed the default -// value is returned. -func (p *Properties) GetParsedDuration(key string, def time.Duration) time.Duration { - s, ok := p.Get(key) - if !ok { - return def - } - v, err := time.ParseDuration(s) - if err != nil { - return def - } - return v -} - -// MustGetParsedDuration parses the expanded value with time.ParseDuration() if the key exists. -// If key does not exist or the value cannot be parsed the function panics. -func (p *Properties) MustGetParsedDuration(key string) time.Duration { - s, ok := p.Get(key) - if !ok { - ErrorHandler(invalidKeyError(key)) - } - v, err := time.ParseDuration(s) - if err != nil { - ErrorHandler(err) - } - return v -} - -// ---------------------------------------------------------------------------- - -// GetFloat64 parses the expanded value as a float64 if the key exists. -// If key does not exist or the value cannot be parsed the default -// value is returned. -func (p *Properties) GetFloat64(key string, def float64) float64 { - v, err := p.getFloat64(key) - if err != nil { - return def - } - return v -} - -// MustGetFloat64 parses the expanded value as a float64 if the key exists. -// If key does not exist or the value cannot be parsed the function panics. 
-func (p *Properties) MustGetFloat64(key string) float64 { - v, err := p.getFloat64(key) - if err != nil { - ErrorHandler(err) - } - return v -} - -func (p *Properties) getFloat64(key string) (value float64, err error) { - if v, ok := p.Get(key); ok { - value, err = strconv.ParseFloat(v, 64) - if err != nil { - return 0, err - } - return value, nil - } - return 0, invalidKeyError(key) -} - -// ---------------------------------------------------------------------------- - -// GetInt parses the expanded value as an int if the key exists. -// If key does not exist or the value cannot be parsed the default -// value is returned. If the value does not fit into an int the -// function panics with an out of range error. -func (p *Properties) GetInt(key string, def int) int { - v, err := p.getInt64(key) - if err != nil { - return def - } - return intRangeCheck(key, v) -} - -// MustGetInt parses the expanded value as an int if the key exists. -// If key does not exist or the value cannot be parsed the function panics. -// If the value does not fit into an int the function panics with -// an out of range error. -func (p *Properties) MustGetInt(key string) int { - v, err := p.getInt64(key) - if err != nil { - ErrorHandler(err) - } - return intRangeCheck(key, v) -} - -// ---------------------------------------------------------------------------- - -// GetInt64 parses the expanded value as an int64 if the key exists. -// If key does not exist or the value cannot be parsed the default -// value is returned. -func (p *Properties) GetInt64(key string, def int64) int64 { - v, err := p.getInt64(key) - if err != nil { - return def - } - return v -} - -// MustGetInt64 parses the expanded value as an int if the key exists. -// If key does not exist or the value cannot be parsed the function panics. -func (p *Properties) MustGetInt64(key string) int64 { - v, err := p.getInt64(key) - if err != nil { - ErrorHandler(err) - } - return v -} - -func (p *Properties) getInt64(key string) (value int64, err error) { - if v, ok := p.Get(key); ok { - value, err = strconv.ParseInt(v, 10, 64) - if err != nil { - return 0, err - } - return value, nil - } - return 0, invalidKeyError(key) -} - -// ---------------------------------------------------------------------------- - -// GetUint parses the expanded value as an uint if the key exists. -// If key does not exist or the value cannot be parsed the default -// value is returned. If the value does not fit into an int the -// function panics with an out of range error. -func (p *Properties) GetUint(key string, def uint) uint { - v, err := p.getUint64(key) - if err != nil { - return def - } - return uintRangeCheck(key, v) -} - -// MustGetUint parses the expanded value as an int if the key exists. -// If key does not exist or the value cannot be parsed the function panics. -// If the value does not fit into an int the function panics with -// an out of range error. -func (p *Properties) MustGetUint(key string) uint { - v, err := p.getUint64(key) - if err != nil { - ErrorHandler(err) - } - return uintRangeCheck(key, v) -} - -// ---------------------------------------------------------------------------- - -// GetUint64 parses the expanded value as an uint64 if the key exists. -// If key does not exist or the value cannot be parsed the default -// value is returned. -func (p *Properties) GetUint64(key string, def uint64) uint64 { - v, err := p.getUint64(key) - if err != nil { - return def - } - return v -} - -// MustGetUint64 parses the expanded value as an int if the key exists. 
-// If key does not exist or the value cannot be parsed the function panics. -func (p *Properties) MustGetUint64(key string) uint64 { - v, err := p.getUint64(key) - if err != nil { - ErrorHandler(err) - } - return v -} - -func (p *Properties) getUint64(key string) (value uint64, err error) { - if v, ok := p.Get(key); ok { - value, err = strconv.ParseUint(v, 10, 64) - if err != nil { - return 0, err - } - return value, nil - } - return 0, invalidKeyError(key) -} - -// ---------------------------------------------------------------------------- - -// GetString returns the expanded value for the given key if exists or -// the default value otherwise. -func (p *Properties) GetString(key, def string) string { - if v, ok := p.Get(key); ok { - return v - } - return def -} - -// MustGetString returns the expanded value for the given key if exists or -// panics otherwise. -func (p *Properties) MustGetString(key string) string { - if v, ok := p.Get(key); ok { - return v - } - ErrorHandler(invalidKeyError(key)) - panic("ErrorHandler should exit") -} - -// ---------------------------------------------------------------------------- - -// Filter returns a new properties object which contains all properties -// for which the key matches the pattern. -func (p *Properties) Filter(pattern string) (*Properties, error) { - re, err := regexp.Compile(pattern) - if err != nil { - return nil, err - } - - return p.FilterRegexp(re), nil -} - -// FilterRegexp returns a new properties object which contains all properties -// for which the key matches the regular expression. -func (p *Properties) FilterRegexp(re *regexp.Regexp) *Properties { - pp := NewProperties() - for _, k := range p.k { - if re.MatchString(k) { - // TODO(fs): we are ignoring the error which flags a circular reference. - // TODO(fs): since we are just copying a subset of keys this cannot happen (fingers crossed) - pp.Set(k, p.m[k]) - } - } - return pp -} - -// FilterPrefix returns a new properties object with a subset of all keys -// with the given prefix. -func (p *Properties) FilterPrefix(prefix string) *Properties { - pp := NewProperties() - for _, k := range p.k { - if strings.HasPrefix(k, prefix) { - // TODO(fs): we are ignoring the error which flags a circular reference. - // TODO(fs): since we are just copying a subset of keys this cannot happen (fingers crossed) - pp.Set(k, p.m[k]) - } - } - return pp -} - -// FilterStripPrefix returns a new properties object with a subset of all keys -// with the given prefix and the prefix removed from the keys. -func (p *Properties) FilterStripPrefix(prefix string) *Properties { - pp := NewProperties() - n := len(prefix) - for _, k := range p.k { - if len(k) > len(prefix) && strings.HasPrefix(k, prefix) { - // TODO(fs): we are ignoring the error which flags a circular reference. - // TODO(fs): since we are modifying keys I am not entirely sure whether we can create a circular reference - // TODO(fs): this function should probably return an error but the signature is fixed - pp.Set(k[n:], p.m[k]) - } - } - return pp -} - -// Len returns the number of keys. -func (p *Properties) Len() int { - return len(p.m) -} - -// Keys returns all keys in the same order as in the input. -func (p *Properties) Keys() []string { - keys := make([]string, len(p.k)) - copy(keys, p.k) - return keys -} - -// Set sets the property key to the corresponding value. -// If a value for key existed before then ok is true and prev -// contains the previous value. 
If the value contains a -// circular reference or a malformed expression then -// an error is returned. -// An empty key is silently ignored. -func (p *Properties) Set(key, value string) (prev string, ok bool, err error) { - if key == "" { - return "", false, nil - } - - // if expansion is disabled we allow circular references - if p.DisableExpansion { - prev, ok = p.Get(key) - p.m[key] = value - return prev, ok, nil - } - - // to check for a circular reference we temporarily need - // to set the new value. If there is an error then revert - // to the previous state. Only if all tests are successful - // then we add the key to the p.k list. - prev, ok = p.Get(key) - p.m[key] = value - - // now check for a circular reference - _, err = p.expand(value) - if err != nil { - - // revert to the previous state - if ok { - p.m[key] = prev - } else { - delete(p.m, key) - } - - return "", false, err - } - - if !ok { - p.k = append(p.k, key) - } - - return prev, ok, nil -} - -// MustSet sets the property key to the corresponding value. -// If a value for key existed before then ok is true and prev -// contains the previous value. An empty key is silently ignored. -func (p *Properties) MustSet(key, value string) (prev string, ok bool) { - prev, ok, err := p.Set(key, value) - if err != nil { - ErrorHandler(err) - } - return prev, ok -} - -// String returns a string of all expanded 'key = value' pairs. -func (p *Properties) String() string { - var s string - for _, key := range p.k { - value, _ := p.Get(key) - s = fmt.Sprintf("%s%s = %s\n", s, key, value) - } - return s -} - -// Write writes all unexpanded 'key = value' pairs to the given writer. -// Write returns the number of bytes written and any write error encountered. -func (p *Properties) Write(w io.Writer, enc Encoding) (n int, err error) { - return p.WriteComment(w, "", enc) -} - -// WriteComment writes all unexpanced 'key = value' pairs to the given writer. -// If prefix is not empty then comments are written with a blank line and the -// given prefix. The prefix should be either "# " or "! " to be compatible with -// the properties file format. Otherwise, the properties parser will not be -// able to read the file back in. It returns the number of bytes written and -// any write error encountered. -func (p *Properties) WriteComment(w io.Writer, prefix string, enc Encoding) (n int, err error) { - var x int - - for _, key := range p.k { - value := p.m[key] - - if prefix != "" { - if comments, ok := p.c[key]; ok { - // don't print comments if they are all empty - allEmpty := true - for _, c := range comments { - if c != "" { - allEmpty = false - break - } - } - - if !allEmpty { - // add a blank line between entries but not at the top - if len(comments) > 0 && n > 0 { - x, err = fmt.Fprintln(w) - if err != nil { - return - } - n += x - } - - for _, c := range comments { - x, err = fmt.Fprintf(w, "%s%s\n", prefix, encode(c, "", enc)) - if err != nil { - return - } - n += x - } - } - } - } - - x, err = fmt.Fprintf(w, "%s = %s\n", encode(key, " :", enc), encode(value, "", enc)) - if err != nil { - return - } - n += x - } - return -} - -// ---------------------------------------------------------------------------- - -// Delete removes the key and its comments. 
-func (p *Properties) Delete(key string) { - delete(p.m, key) - delete(p.c, key) - newKeys := []string{} - for _, k := range p.k { - if k != key { - newKeys = append(newKeys, k) - } - } - p.k = newKeys -} - -// Merge merges properties, comments and keys from other *Properties into p -func (p *Properties) Merge(other *Properties) { - for k, v := range other.m { - p.m[k] = v - } - for k, v := range other.c { - p.c[k] = v - } - -outer: - for _, otherKey := range other.k { - for _, key := range p.k { - if otherKey == key { - continue outer - } - } - p.k = append(p.k, otherKey) - } -} - -// ---------------------------------------------------------------------------- - -// check expands all values and returns an error if a circular reference or -// a malformed expression was found. -func (p *Properties) check() error { - for _, value := range p.m { - if _, err := p.expand(value); err != nil { - return err - } - } - return nil -} - -func (p *Properties) expand(input string) (string, error) { - // no pre/postfix -> nothing to expand - if p.Prefix == "" && p.Postfix == "" { - return input, nil - } - - return expand(input, make(map[string]bool), p.Prefix, p.Postfix, p.m) -} - -// expand recursively expands expressions of '(prefix)key(postfix)' to their corresponding values. -// The function keeps track of the keys that were already expanded and stops if it -// detects a circular reference or a malformed expression of the form '(prefix)key'. -func expand(s string, keys map[string]bool, prefix, postfix string, values map[string]string) (string, error) { - start := strings.Index(s, prefix) - if start == -1 { - return s, nil - } - - keyStart := start + len(prefix) - keyLen := strings.Index(s[keyStart:], postfix) - if keyLen == -1 { - return "", fmt.Errorf("malformed expression") - } - - end := keyStart + keyLen + len(postfix) - 1 - key := s[keyStart : keyStart+keyLen] - - // fmt.Printf("s:%q pp:%q start:%d end:%d keyStart:%d keyLen:%d key:%q\n", s, prefix + "..." + postfix, start, end, keyStart, keyLen, key) - - if _, ok := keys[key]; ok { - return "", fmt.Errorf("circular reference") - } - - val, ok := values[key] - if !ok { - val = os.Getenv(key) - } - - // remember that we've seen the key - keys[key] = true - - return expand(s[:start]+val+s[end+1:], keys, prefix, postfix, values) -} - -// encode encodes a UTF-8 string to ISO-8859-1 and escapes some characters. -func encode(s string, special string, enc Encoding) string { - switch enc { - case UTF8: - return encodeUtf8(s, special) - case ISO_8859_1: - return encodeIso(s, special) - default: - panic(fmt.Sprintf("unsupported encoding %v", enc)) - } -} - -func encodeUtf8(s string, special string) string { - v := "" - for pos := 0; pos < len(s); { - r, w := utf8.DecodeRuneInString(s[pos:]) - pos += w - v += escape(r, special) - } - return v -} - -func encodeIso(s string, special string) string { - var r rune - var w int - var v string - for pos := 0; pos < len(s); { - switch r, w = utf8.DecodeRuneInString(s[pos:]); { - case r < 1<<8: // single byte rune -> escape special chars only - v += escape(r, special) - case r < 1<<16: // two byte rune -> unicode literal - v += fmt.Sprintf("\\u%04x", r) - default: // more than two bytes per rune -> can't encode - v += "?" 
- } - pos += w - } - return v -} - -func escape(r rune, special string) string { - switch r { - case '\f': - return "\\f" - case '\n': - return "\\n" - case '\r': - return "\\r" - case '\t': - return "\\t" - default: - if strings.ContainsRune(special, r) { - return "\\" + string(r) - } - return string(r) - } -} - -func invalidKeyError(key string) error { - return fmt.Errorf("unknown property: %s", key) -} diff --git a/vendor/github.com/magiconair/properties/rangecheck.go b/vendor/github.com/magiconair/properties/rangecheck.go deleted file mode 100644 index 2e907d540b..0000000000 --- a/vendor/github.com/magiconair/properties/rangecheck.go +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright 2017 Frank Schroeder. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package properties - -import ( - "fmt" - "math" -) - -// make this a var to overwrite it in a test -var is32Bit = ^uint(0) == math.MaxUint32 - -// intRangeCheck checks if the value fits into the int type and -// panics if it does not. -func intRangeCheck(key string, v int64) int { - if is32Bit && (v < math.MinInt32 || v > math.MaxInt32) { - panic(fmt.Sprintf("Value %d for key %s out of range", v, key)) - } - return int(v) -} - -// uintRangeCheck checks if the value fits into the uint type and -// panics if it does not. -func uintRangeCheck(key string, v uint64) uint { - if is32Bit && v > math.MaxUint32 { - panic(fmt.Sprintf("Value %d for key %s out of range", v, key)) - } - return uint(v) -} diff --git a/vendor/github.com/mitchellh/mapstructure/README.md b/vendor/github.com/mitchellh/mapstructure/README.md deleted file mode 100644 index 659d6885fc..0000000000 --- a/vendor/github.com/mitchellh/mapstructure/README.md +++ /dev/null @@ -1,46 +0,0 @@ -# mapstructure - -mapstructure is a Go library for decoding generic map values to structures -and vice versa, while providing helpful error handling. - -This library is most useful when decoding values from some data stream (JSON, -Gob, etc.) where you don't _quite_ know the structure of the underlying data -until you read a part of it. You can therefore read a `map[string]interface{}` -and use this library to decode it into the proper underlying native Go -structure. - -## Installation - -Standard `go get`: - -``` -$ go get github.com/mitchellh/mapstructure -``` - -## Usage & Example - -For usage and examples see the [Godoc](http://godoc.org/github.com/mitchellh/mapstructure). - -The `Decode` function has examples associated with it there. - -## But Why?! - -Go offers fantastic standard libraries for decoding formats such as JSON. -The standard method is to have a struct pre-created, and populate that struct -from the bytes of the encoded format. This is great, but the problem is if -you have configuration or an encoding that changes slightly depending on -specific fields. For example, consider this JSON: - -```json -{ - "type": "person", - "name": "Mitchell" -} -``` - -Perhaps we can't populate a specific structure without first reading -the "type" field from the JSON. We could always do two passes over the -decoding of the JSON (reading the "type" first, and the rest later). -However, it is much simpler to just decode this into a `map[string]interface{}` -structure, read the "type" key, then use something like this library -to decode it into the proper structure. 
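For reference, the README removed above describes a two-pass pattern: decode raw JSON into a `map[string]interface{}`, inspect the "type" key, and only then decode into the concrete struct. The sketch below illustrates that pattern using only `mapstructure.Decode` (whose signature appears later in this diff); the `Person` struct, the input literal, and the `main` wrapper are hypothetical and not part of this change.

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/mitchellh/mapstructure" // import path as vendored in this repository
)

// Person is a hypothetical target type for the "person" payloads described in the README.
type Person struct {
	Type string
	Name string
}

func main() {
	input := []byte(`{"type": "person", "name": "Mitchell"}`)

	// First pass: decode into a generic map so the "type" field can be read.
	var raw map[string]interface{}
	if err := json.Unmarshal(input, &raw); err != nil {
		panic(err)
	}

	// Second pass: once the type is known, decode the same map into the matching struct.
	if raw["type"] == "person" {
		var p Person
		if err := mapstructure.Decode(raw, &p); err != nil {
			panic(err)
		}
		fmt.Println(p.Name) // Mitchell
	}
}
```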
diff --git a/vendor/github.com/mitchellh/mapstructure/decode_hooks.go b/vendor/github.com/mitchellh/mapstructure/decode_hooks.go deleted file mode 100644 index 115ae67c11..0000000000 --- a/vendor/github.com/mitchellh/mapstructure/decode_hooks.go +++ /dev/null @@ -1,154 +0,0 @@ -package mapstructure - -import ( - "errors" - "reflect" - "strconv" - "strings" - "time" -) - -// typedDecodeHook takes a raw DecodeHookFunc (an interface{}) and turns -// it into the proper DecodeHookFunc type, such as DecodeHookFuncType. -func typedDecodeHook(h DecodeHookFunc) DecodeHookFunc { - // Create variables here so we can reference them with the reflect pkg - var f1 DecodeHookFuncType - var f2 DecodeHookFuncKind - - // Fill in the variables into this interface and the rest is done - // automatically using the reflect package. - potential := []interface{}{f1, f2} - - v := reflect.ValueOf(h) - vt := v.Type() - for _, raw := range potential { - pt := reflect.ValueOf(raw).Type() - if vt.ConvertibleTo(pt) { - return v.Convert(pt).Interface() - } - } - - return nil -} - -// DecodeHookExec executes the given decode hook. This should be used -// since it'll naturally degrade to the older backwards compatible DecodeHookFunc -// that took reflect.Kind instead of reflect.Type. -func DecodeHookExec( - raw DecodeHookFunc, - from reflect.Type, to reflect.Type, - data interface{}) (interface{}, error) { - // Build our arguments that reflect expects - argVals := make([]reflect.Value, 3) - argVals[0] = reflect.ValueOf(from) - argVals[1] = reflect.ValueOf(to) - argVals[2] = reflect.ValueOf(data) - - switch f := typedDecodeHook(raw).(type) { - case DecodeHookFuncType: - return f(from, to, data) - case DecodeHookFuncKind: - return f(from.Kind(), to.Kind(), data) - default: - return nil, errors.New("invalid decode hook signature") - } -} - -// ComposeDecodeHookFunc creates a single DecodeHookFunc that -// automatically composes multiple DecodeHookFuncs. -// -// The composed funcs are called in order, with the result of the -// previous transformation. -func ComposeDecodeHookFunc(fs ...DecodeHookFunc) DecodeHookFunc { - return func( - f reflect.Type, - t reflect.Type, - data interface{}) (interface{}, error) { - var err error - for _, f1 := range fs { - data, err = DecodeHookExec(f1, f, t, data) - if err != nil { - return nil, err - } - - // Modify the from kind to be correct with the new data - f = nil - if val := reflect.ValueOf(data); val.IsValid() { - f = val.Type() - } - } - - return data, nil - } -} - -// StringToSliceHookFunc returns a DecodeHookFunc that converts -// string to []string by splitting on the given sep. -func StringToSliceHookFunc(sep string) DecodeHookFunc { - return func( - f reflect.Kind, - t reflect.Kind, - data interface{}) (interface{}, error) { - if f != reflect.String || t != reflect.Slice { - return data, nil - } - - raw := data.(string) - if raw == "" { - return []string{}, nil - } - - return strings.Split(raw, sep), nil - } -} - -// StringToTimeDurationHookFunc returns a DecodeHookFunc that converts -// strings to time.Duration. 
-func StringToTimeDurationHookFunc() DecodeHookFunc { - return func( - f reflect.Type, - t reflect.Type, - data interface{}) (interface{}, error) { - if f.Kind() != reflect.String { - return data, nil - } - if t != reflect.TypeOf(time.Duration(5)) { - return data, nil - } - - // Convert it by parsing - return time.ParseDuration(data.(string)) - } -} - -func WeaklyTypedHook( - f reflect.Kind, - t reflect.Kind, - data interface{}) (interface{}, error) { - dataVal := reflect.ValueOf(data) - switch t { - case reflect.String: - switch f { - case reflect.Bool: - if dataVal.Bool() { - return "1", nil - } else { - return "0", nil - } - case reflect.Float32: - return strconv.FormatFloat(dataVal.Float(), 'f', -1, 64), nil - case reflect.Int: - return strconv.FormatInt(dataVal.Int(), 10), nil - case reflect.Slice: - dataType := dataVal.Type() - elemKind := dataType.Elem().Kind() - if elemKind == reflect.Uint8 { - return string(dataVal.Interface().([]uint8)), nil - } - case reflect.Uint: - return strconv.FormatUint(dataVal.Uint(), 10), nil - } - } - - return data, nil -} diff --git a/vendor/github.com/mitchellh/mapstructure/error.go b/vendor/github.com/mitchellh/mapstructure/error.go deleted file mode 100644 index 47a99e5af3..0000000000 --- a/vendor/github.com/mitchellh/mapstructure/error.go +++ /dev/null @@ -1,50 +0,0 @@ -package mapstructure - -import ( - "errors" - "fmt" - "sort" - "strings" -) - -// Error implements the error interface and can represents multiple -// errors that occur in the course of a single decode. -type Error struct { - Errors []string -} - -func (e *Error) Error() string { - points := make([]string, len(e.Errors)) - for i, err := range e.Errors { - points[i] = fmt.Sprintf("* %s", err) - } - - sort.Strings(points) - return fmt.Sprintf( - "%d error(s) decoding:\n\n%s", - len(e.Errors), strings.Join(points, "\n")) -} - -// WrappedErrors implements the errwrap.Wrapper interface to make this -// return value more useful with the errwrap and go-multierror libraries. -func (e *Error) WrappedErrors() []error { - if e == nil { - return nil - } - - result := make([]error, len(e.Errors)) - for i, e := range e.Errors { - result[i] = errors.New(e) - } - - return result -} - -func appendErrors(errors []string, err error) []string { - switch e := err.(type) { - case *Error: - return append(errors, e.Errors...) - default: - return append(errors, e.Error()) - } -} diff --git a/vendor/github.com/mitchellh/mapstructure/mapstructure.go b/vendor/github.com/mitchellh/mapstructure/mapstructure.go deleted file mode 100644 index 15959821b7..0000000000 --- a/vendor/github.com/mitchellh/mapstructure/mapstructure.go +++ /dev/null @@ -1,809 +0,0 @@ -// The mapstructure package exposes functionality to convert an -// arbitrary map[string]interface{} into a native Go structure. -// -// The Go structure can be arbitrarily complex, containing slices, -// other structs, etc. and the decoder will properly decode nested -// maps and so on into the proper structures in the native Go struct. -// See the examples to see what the decoder is capable of. -package mapstructure - -import ( - "encoding/json" - "errors" - "fmt" - "reflect" - "sort" - "strconv" - "strings" -) - -// DecodeHookFunc is the callback function that can be used for -// data transformations. See "DecodeHook" in the DecoderConfig -// struct. -// -// The type should be DecodeHookFuncType or DecodeHookFuncKind. -// Either is accepted. 
Types are a superset of Kinds (Types can return -// Kinds) and are generally a richer thing to use, but Kinds are simpler -// if you only need those. -// -// The reason DecodeHookFunc is multi-typed is for backwards compatibility: -// we started with Kinds and then realized Types were the better solution, -// but have a promise to not break backwards compat so we now support -// both. -type DecodeHookFunc interface{} - -type DecodeHookFuncType func(reflect.Type, reflect.Type, interface{}) (interface{}, error) -type DecodeHookFuncKind func(reflect.Kind, reflect.Kind, interface{}) (interface{}, error) - -// DecoderConfig is the configuration that is used to create a new decoder -// and allows customization of various aspects of decoding. -type DecoderConfig struct { - // DecodeHook, if set, will be called before any decoding and any - // type conversion (if WeaklyTypedInput is on). This lets you modify - // the values before they're set down onto the resulting struct. - // - // If an error is returned, the entire decode will fail with that - // error. - DecodeHook DecodeHookFunc - - // If ErrorUnused is true, then it is an error for there to exist - // keys in the original map that were unused in the decoding process - // (extra keys). - ErrorUnused bool - - // ZeroFields, if set to true, will zero fields before writing them. - // For example, a map will be emptied before decoded values are put in - // it. If this is false, a map will be merged. - ZeroFields bool - - // If WeaklyTypedInput is true, the decoder will make the following - // "weak" conversions: - // - // - bools to string (true = "1", false = "0") - // - numbers to string (base 10) - // - bools to int/uint (true = 1, false = 0) - // - strings to int/uint (base implied by prefix) - // - int to bool (true if value != 0) - // - string to bool (accepts: 1, t, T, TRUE, true, True, 0, f, F, - // FALSE, false, False. Anything else is an error) - // - empty array = empty map and vice versa - // - negative numbers to overflowed uint values (base 10) - // - slice of maps to a merged map - // - WeaklyTypedInput bool - - // Metadata is the struct that will contain extra metadata about - // the decoding. If this is nil, then no metadata will be tracked. - Metadata *Metadata - - // Result is a pointer to the struct that will contain the decoded - // value. - Result interface{} - - // The tag name that mapstructure reads for field names. This - // defaults to "mapstructure" - TagName string -} - -// A Decoder takes a raw interface value and turns it into structured -// data, keeping track of rich error information along the way in case -// anything goes wrong. Unlike the basic top-level Decode method, you can -// more finely control how the Decoder behaves using the DecoderConfig -// structure. The top-level Decode method is just a convenience that sets -// up the most basic Decoder. -type Decoder struct { - config *DecoderConfig -} - -// Metadata contains information about decoding a structure that -// is tedious or difficult to get otherwise. -type Metadata struct { - // Keys are the keys of the structure which were successfully decoded - Keys []string - - // Unused is a slice of keys that were found in the raw value but - // weren't decoded since there was no matching field in the result interface - Unused []string -} - -// Decode takes a map and uses reflection to convert it into the -// given Go native structure. val must be a pointer to a struct. 
-func Decode(m interface{}, rawVal interface{}) error { - config := &DecoderConfig{ - Metadata: nil, - Result: rawVal, - } - - decoder, err := NewDecoder(config) - if err != nil { - return err - } - - return decoder.Decode(m) -} - -// WeakDecode is the same as Decode but is shorthand to enable -// WeaklyTypedInput. See DecoderConfig for more info. -func WeakDecode(input, output interface{}) error { - config := &DecoderConfig{ - Metadata: nil, - Result: output, - WeaklyTypedInput: true, - } - - decoder, err := NewDecoder(config) - if err != nil { - return err - } - - return decoder.Decode(input) -} - -// NewDecoder returns a new decoder for the given configuration. Once -// a decoder has been returned, the same configuration must not be used -// again. -func NewDecoder(config *DecoderConfig) (*Decoder, error) { - val := reflect.ValueOf(config.Result) - if val.Kind() != reflect.Ptr { - return nil, errors.New("result must be a pointer") - } - - val = val.Elem() - if !val.CanAddr() { - return nil, errors.New("result must be addressable (a pointer)") - } - - if config.Metadata != nil { - if config.Metadata.Keys == nil { - config.Metadata.Keys = make([]string, 0) - } - - if config.Metadata.Unused == nil { - config.Metadata.Unused = make([]string, 0) - } - } - - if config.TagName == "" { - config.TagName = "mapstructure" - } - - result := &Decoder{ - config: config, - } - - return result, nil -} - -// Decode decodes the given raw interface to the target pointer specified -// by the configuration. -func (d *Decoder) Decode(raw interface{}) error { - return d.decode("", raw, reflect.ValueOf(d.config.Result).Elem()) -} - -// Decodes an unknown data type into a specific reflection value. -func (d *Decoder) decode(name string, data interface{}, val reflect.Value) error { - if data == nil { - // If the data is nil, then we don't set anything. - return nil - } - - dataVal := reflect.ValueOf(data) - if !dataVal.IsValid() { - // If the data value is invalid, then we just set the value - // to be the zero value. - val.Set(reflect.Zero(val.Type())) - return nil - } - - if d.config.DecodeHook != nil { - // We have a DecodeHook, so let's pre-process the data. - var err error - data, err = DecodeHookExec( - d.config.DecodeHook, - dataVal.Type(), val.Type(), data) - if err != nil { - return fmt.Errorf("error decoding '%s': %s", name, err) - } - } - - var err error - dataKind := getKind(val) - switch dataKind { - case reflect.Bool: - err = d.decodeBool(name, data, val) - case reflect.Interface: - err = d.decodeBasic(name, data, val) - case reflect.String: - err = d.decodeString(name, data, val) - case reflect.Int: - err = d.decodeInt(name, data, val) - case reflect.Uint: - err = d.decodeUint(name, data, val) - case reflect.Float32: - err = d.decodeFloat(name, data, val) - case reflect.Struct: - err = d.decodeStruct(name, data, val) - case reflect.Map: - err = d.decodeMap(name, data, val) - case reflect.Ptr: - err = d.decodePtr(name, data, val) - case reflect.Slice: - err = d.decodeSlice(name, data, val) - case reflect.Func: - err = d.decodeFunc(name, data, val) - default: - // If we reached this point then we weren't able to decode it - return fmt.Errorf("%s: unsupported type: %s", name, dataKind) - } - - // If we reached here, then we successfully decoded SOMETHING, so - // mark the key as used if we're tracking metadata. - if d.config.Metadata != nil && name != "" { - d.config.Metadata.Keys = append(d.config.Metadata.Keys, name) - } - - return err -} - -// This decodes a basic type (bool, int, string, etc.) 
and sets the -// value to "data" of that type. -func (d *Decoder) decodeBasic(name string, data interface{}, val reflect.Value) error { - dataVal := reflect.ValueOf(data) - if !dataVal.IsValid() { - dataVal = reflect.Zero(val.Type()) - } - - dataValType := dataVal.Type() - if !dataValType.AssignableTo(val.Type()) { - return fmt.Errorf( - "'%s' expected type '%s', got '%s'", - name, val.Type(), dataValType) - } - - val.Set(dataVal) - return nil -} - -func (d *Decoder) decodeString(name string, data interface{}, val reflect.Value) error { - dataVal := reflect.ValueOf(data) - dataKind := getKind(dataVal) - - converted := true - switch { - case dataKind == reflect.String: - val.SetString(dataVal.String()) - case dataKind == reflect.Bool && d.config.WeaklyTypedInput: - if dataVal.Bool() { - val.SetString("1") - } else { - val.SetString("0") - } - case dataKind == reflect.Int && d.config.WeaklyTypedInput: - val.SetString(strconv.FormatInt(dataVal.Int(), 10)) - case dataKind == reflect.Uint && d.config.WeaklyTypedInput: - val.SetString(strconv.FormatUint(dataVal.Uint(), 10)) - case dataKind == reflect.Float32 && d.config.WeaklyTypedInput: - val.SetString(strconv.FormatFloat(dataVal.Float(), 'f', -1, 64)) - case dataKind == reflect.Slice && d.config.WeaklyTypedInput: - dataType := dataVal.Type() - elemKind := dataType.Elem().Kind() - switch { - case elemKind == reflect.Uint8: - val.SetString(string(dataVal.Interface().([]uint8))) - default: - converted = false - } - default: - converted = false - } - - if !converted { - return fmt.Errorf( - "'%s' expected type '%s', got unconvertible type '%s'", - name, val.Type(), dataVal.Type()) - } - - return nil -} - -func (d *Decoder) decodeInt(name string, data interface{}, val reflect.Value) error { - dataVal := reflect.ValueOf(data) - dataKind := getKind(dataVal) - dataType := dataVal.Type() - - switch { - case dataKind == reflect.Int: - val.SetInt(dataVal.Int()) - case dataKind == reflect.Uint: - val.SetInt(int64(dataVal.Uint())) - case dataKind == reflect.Float32: - val.SetInt(int64(dataVal.Float())) - case dataKind == reflect.Bool && d.config.WeaklyTypedInput: - if dataVal.Bool() { - val.SetInt(1) - } else { - val.SetInt(0) - } - case dataKind == reflect.String && d.config.WeaklyTypedInput: - i, err := strconv.ParseInt(dataVal.String(), 0, val.Type().Bits()) - if err == nil { - val.SetInt(i) - } else { - return fmt.Errorf("cannot parse '%s' as int: %s", name, err) - } - case dataType.PkgPath() == "encoding/json" && dataType.Name() == "Number": - jn := data.(json.Number) - i, err := jn.Int64() - if err != nil { - return fmt.Errorf( - "error decoding json.Number into %s: %s", name, err) - } - val.SetInt(i) - default: - return fmt.Errorf( - "'%s' expected type '%s', got unconvertible type '%s'", - name, val.Type(), dataVal.Type()) - } - - return nil -} - -func (d *Decoder) decodeUint(name string, data interface{}, val reflect.Value) error { - dataVal := reflect.ValueOf(data) - dataKind := getKind(dataVal) - - switch { - case dataKind == reflect.Int: - i := dataVal.Int() - if i < 0 && !d.config.WeaklyTypedInput { - return fmt.Errorf("cannot parse '%s', %d overflows uint", - name, i) - } - val.SetUint(uint64(i)) - case dataKind == reflect.Uint: - val.SetUint(dataVal.Uint()) - case dataKind == reflect.Float32: - f := dataVal.Float() - if f < 0 && !d.config.WeaklyTypedInput { - return fmt.Errorf("cannot parse '%s', %f overflows uint", - name, f) - } - val.SetUint(uint64(f)) - case dataKind == reflect.Bool && d.config.WeaklyTypedInput: - if dataVal.Bool() { - 
val.SetUint(1) - } else { - val.SetUint(0) - } - case dataKind == reflect.String && d.config.WeaklyTypedInput: - i, err := strconv.ParseUint(dataVal.String(), 0, val.Type().Bits()) - if err == nil { - val.SetUint(i) - } else { - return fmt.Errorf("cannot parse '%s' as uint: %s", name, err) - } - default: - return fmt.Errorf( - "'%s' expected type '%s', got unconvertible type '%s'", - name, val.Type(), dataVal.Type()) - } - - return nil -} - -func (d *Decoder) decodeBool(name string, data interface{}, val reflect.Value) error { - dataVal := reflect.ValueOf(data) - dataKind := getKind(dataVal) - - switch { - case dataKind == reflect.Bool: - val.SetBool(dataVal.Bool()) - case dataKind == reflect.Int && d.config.WeaklyTypedInput: - val.SetBool(dataVal.Int() != 0) - case dataKind == reflect.Uint && d.config.WeaklyTypedInput: - val.SetBool(dataVal.Uint() != 0) - case dataKind == reflect.Float32 && d.config.WeaklyTypedInput: - val.SetBool(dataVal.Float() != 0) - case dataKind == reflect.String && d.config.WeaklyTypedInput: - b, err := strconv.ParseBool(dataVal.String()) - if err == nil { - val.SetBool(b) - } else if dataVal.String() == "" { - val.SetBool(false) - } else { - return fmt.Errorf("cannot parse '%s' as bool: %s", name, err) - } - default: - return fmt.Errorf( - "'%s' expected type '%s', got unconvertible type '%s'", - name, val.Type(), dataVal.Type()) - } - - return nil -} - -func (d *Decoder) decodeFloat(name string, data interface{}, val reflect.Value) error { - dataVal := reflect.ValueOf(data) - dataKind := getKind(dataVal) - dataType := dataVal.Type() - - switch { - case dataKind == reflect.Int: - val.SetFloat(float64(dataVal.Int())) - case dataKind == reflect.Uint: - val.SetFloat(float64(dataVal.Uint())) - case dataKind == reflect.Float32: - val.SetFloat(float64(dataVal.Float())) - case dataKind == reflect.Bool && d.config.WeaklyTypedInput: - if dataVal.Bool() { - val.SetFloat(1) - } else { - val.SetFloat(0) - } - case dataKind == reflect.String && d.config.WeaklyTypedInput: - f, err := strconv.ParseFloat(dataVal.String(), val.Type().Bits()) - if err == nil { - val.SetFloat(f) - } else { - return fmt.Errorf("cannot parse '%s' as float: %s", name, err) - } - case dataType.PkgPath() == "encoding/json" && dataType.Name() == "Number": - jn := data.(json.Number) - i, err := jn.Float64() - if err != nil { - return fmt.Errorf( - "error decoding json.Number into %s: %s", name, err) - } - val.SetFloat(i) - default: - return fmt.Errorf( - "'%s' expected type '%s', got unconvertible type '%s'", - name, val.Type(), dataVal.Type()) - } - - return nil -} - -func (d *Decoder) decodeMap(name string, data interface{}, val reflect.Value) error { - valType := val.Type() - valKeyType := valType.Key() - valElemType := valType.Elem() - - // By default we overwrite keys in the current map - valMap := val - - // If the map is nil or we're purposely zeroing fields, make a new map - if valMap.IsNil() || d.config.ZeroFields { - // Make a new map to hold our result - mapType := reflect.MapOf(valKeyType, valElemType) - valMap = reflect.MakeMap(mapType) - } - - // Check input type - dataVal := reflect.Indirect(reflect.ValueOf(data)) - if dataVal.Kind() != reflect.Map { - // In weak mode, we accept a slice of maps as an input... 
- if d.config.WeaklyTypedInput { - switch dataVal.Kind() { - case reflect.Array, reflect.Slice: - // Special case for BC reasons (covered by tests) - if dataVal.Len() == 0 { - val.Set(valMap) - return nil - } - - for i := 0; i < dataVal.Len(); i++ { - err := d.decode( - fmt.Sprintf("%s[%d]", name, i), - dataVal.Index(i).Interface(), val) - if err != nil { - return err - } - } - - return nil - } - } - - return fmt.Errorf("'%s' expected a map, got '%s'", name, dataVal.Kind()) - } - - // Accumulate errors - errors := make([]string, 0) - - for _, k := range dataVal.MapKeys() { - fieldName := fmt.Sprintf("%s[%s]", name, k) - - // First decode the key into the proper type - currentKey := reflect.Indirect(reflect.New(valKeyType)) - if err := d.decode(fieldName, k.Interface(), currentKey); err != nil { - errors = appendErrors(errors, err) - continue - } - - // Next decode the data into the proper type - v := dataVal.MapIndex(k).Interface() - currentVal := reflect.Indirect(reflect.New(valElemType)) - if err := d.decode(fieldName, v, currentVal); err != nil { - errors = appendErrors(errors, err) - continue - } - - valMap.SetMapIndex(currentKey, currentVal) - } - - // Set the built up map to the value - val.Set(valMap) - - // If we had errors, return those - if len(errors) > 0 { - return &Error{errors} - } - - return nil -} - -func (d *Decoder) decodePtr(name string, data interface{}, val reflect.Value) error { - // Create an element of the concrete (non pointer) type and decode - // into that. Then set the value of the pointer to this type. - valType := val.Type() - valElemType := valType.Elem() - - realVal := val - if realVal.IsNil() || d.config.ZeroFields { - realVal = reflect.New(valElemType) - } - - if err := d.decode(name, data, reflect.Indirect(realVal)); err != nil { - return err - } - - val.Set(realVal) - return nil -} - -func (d *Decoder) decodeFunc(name string, data interface{}, val reflect.Value) error { - // Create an element of the concrete (non pointer) type and decode - // into that. Then set the value of the pointer to this type. - dataVal := reflect.Indirect(reflect.ValueOf(data)) - if val.Type() != dataVal.Type() { - return fmt.Errorf( - "'%s' expected type '%s', got unconvertible type '%s'", - name, val.Type(), dataVal.Type()) - } - val.Set(dataVal) - return nil -} - -func (d *Decoder) decodeSlice(name string, data interface{}, val reflect.Value) error { - dataVal := reflect.Indirect(reflect.ValueOf(data)) - dataValKind := dataVal.Kind() - valType := val.Type() - valElemType := valType.Elem() - sliceType := reflect.SliceOf(valElemType) - - valSlice := val - if valSlice.IsNil() || d.config.ZeroFields { - - // Check input type - if dataValKind != reflect.Array && dataValKind != reflect.Slice { - // Accept empty map instead of array/slice in weakly typed mode - if d.config.WeaklyTypedInput && dataVal.Kind() == reflect.Map && dataVal.Len() == 0 { - val.Set(reflect.MakeSlice(sliceType, 0, 0)) - return nil - } else { - return fmt.Errorf( - "'%s': source data must be an array or slice, got %s", name, dataValKind) - } - } - - // Make a new slice to hold our result, same size as the original data. 
- valSlice = reflect.MakeSlice(sliceType, dataVal.Len(), dataVal.Len()) - } - - // Accumulate any errors - errors := make([]string, 0) - - for i := 0; i < dataVal.Len(); i++ { - currentData := dataVal.Index(i).Interface() - for valSlice.Len() <= i { - valSlice = reflect.Append(valSlice, reflect.Zero(valElemType)) - } - currentField := valSlice.Index(i) - - fieldName := fmt.Sprintf("%s[%d]", name, i) - if err := d.decode(fieldName, currentData, currentField); err != nil { - errors = appendErrors(errors, err) - } - } - - // Finally, set the value to the slice we built up - val.Set(valSlice) - - // If there were errors, we return those - if len(errors) > 0 { - return &Error{errors} - } - - return nil -} - -func (d *Decoder) decodeStruct(name string, data interface{}, val reflect.Value) error { - dataVal := reflect.Indirect(reflect.ValueOf(data)) - - // If the type of the value to write to and the data match directly, - // then we just set it directly instead of recursing into the structure. - if dataVal.Type() == val.Type() { - val.Set(dataVal) - return nil - } - - dataValKind := dataVal.Kind() - if dataValKind != reflect.Map { - return fmt.Errorf("'%s' expected a map, got '%s'", name, dataValKind) - } - - dataValType := dataVal.Type() - if kind := dataValType.Key().Kind(); kind != reflect.String && kind != reflect.Interface { - return fmt.Errorf( - "'%s' needs a map with string keys, has '%s' keys", - name, dataValType.Key().Kind()) - } - - dataValKeys := make(map[reflect.Value]struct{}) - dataValKeysUnused := make(map[interface{}]struct{}) - for _, dataValKey := range dataVal.MapKeys() { - dataValKeys[dataValKey] = struct{}{} - dataValKeysUnused[dataValKey.Interface()] = struct{}{} - } - - errors := make([]string, 0) - - // This slice will keep track of all the structs we'll be decoding. - // There can be more than one struct if there are embedded structs - // that are squashed. - structs := make([]reflect.Value, 1, 5) - structs[0] = val - - // Compile the list of all the fields that we're going to be decoding - // from all the structs. - fields := make(map[*reflect.StructField]reflect.Value) - for len(structs) > 0 { - structVal := structs[0] - structs = structs[1:] - - structType := structVal.Type() - - for i := 0; i < structType.NumField(); i++ { - fieldType := structType.Field(i) - fieldKind := fieldType.Type.Kind() - - // If "squash" is specified in the tag, we squash the field down. - squash := false - tagParts := strings.Split(fieldType.Tag.Get(d.config.TagName), ",") - for _, tag := range tagParts[1:] { - if tag == "squash" { - squash = true - break - } - } - - if squash { - if fieldKind != reflect.Struct { - errors = appendErrors(errors, - fmt.Errorf("%s: unsupported type for squash: %s", fieldType.Name, fieldKind)) - } else { - structs = append(structs, val.FieldByName(fieldType.Name)) - } - continue - } - - // Normal struct field, store it away - fields[&fieldType] = structVal.Field(i) - } - } - - for fieldType, field := range fields { - fieldName := fieldType.Name - - tagValue := fieldType.Tag.Get(d.config.TagName) - tagValue = strings.SplitN(tagValue, ",", 2)[0] - if tagValue != "" { - fieldName = tagValue - } - - rawMapKey := reflect.ValueOf(fieldName) - rawMapVal := dataVal.MapIndex(rawMapKey) - if !rawMapVal.IsValid() { - // Do a slower search by iterating over each key and - // doing case-insensitive search. 
- for dataValKey := range dataValKeys { - mK, ok := dataValKey.Interface().(string) - if !ok { - // Not a string key - continue - } - - if strings.EqualFold(mK, fieldName) { - rawMapKey = dataValKey - rawMapVal = dataVal.MapIndex(dataValKey) - break - } - } - - if !rawMapVal.IsValid() { - // There was no matching key in the map for the value in - // the struct. Just ignore. - continue - } - } - - // Delete the key we're using from the unused map so we stop tracking - delete(dataValKeysUnused, rawMapKey.Interface()) - - if !field.IsValid() { - // This should never happen - panic("field is not valid") - } - - // If we can't set the field, then it is unexported or something, - // and we just continue onwards. - if !field.CanSet() { - continue - } - - // If the name is empty string, then we're at the root, and we - // don't dot-join the fields. - if name != "" { - fieldName = fmt.Sprintf("%s.%s", name, fieldName) - } - - if err := d.decode(fieldName, rawMapVal.Interface(), field); err != nil { - errors = appendErrors(errors, err) - } - } - - if d.config.ErrorUnused && len(dataValKeysUnused) > 0 { - keys := make([]string, 0, len(dataValKeysUnused)) - for rawKey := range dataValKeysUnused { - keys = append(keys, rawKey.(string)) - } - sort.Strings(keys) - - err := fmt.Errorf("'%s' has invalid keys: %s", name, strings.Join(keys, ", ")) - errors = appendErrors(errors, err) - } - - if len(errors) > 0 { - return &Error{errors} - } - - // Add the unused keys to the list of unused keys if we're tracking metadata - if d.config.Metadata != nil { - for rawKey := range dataValKeysUnused { - key := rawKey.(string) - if name != "" { - key = fmt.Sprintf("%s.%s", name, key) - } - - d.config.Metadata.Unused = append(d.config.Metadata.Unused, key) - } - } - - return nil -} - -func getKind(val reflect.Value) reflect.Kind { - kind := val.Kind() - - switch { - case kind >= reflect.Int && kind <= reflect.Int64: - return reflect.Int - case kind >= reflect.Uint && kind <= reflect.Uint64: - return reflect.Uint - case kind >= reflect.Float32 && kind <= reflect.Float64: - return reflect.Float32 - default: - return kind - } -} diff --git a/vendor/github.com/mitchellh/mapstructure/LICENSE b/vendor/github.com/nicksnyder/go-i18n/LICENSE similarity index 94% rename from vendor/github.com/mitchellh/mapstructure/LICENSE rename to vendor/github.com/nicksnyder/go-i18n/LICENSE index f9c841a51e..609cce7976 100644 --- a/vendor/github.com/mitchellh/mapstructure/LICENSE +++ b/vendor/github.com/nicksnyder/go-i18n/LICENSE @@ -1,6 +1,4 @@ -The MIT License (MIT) - -Copyright (c) 2013 Mitchell Hashimoto +Copyright (c) 2014 Nick Snyder https://github.com/nicksnyder Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/vendor/github.com/nicksnyder/go-i18n/i18n/bundle/bundle.go b/vendor/github.com/nicksnyder/go-i18n/i18n/bundle/bundle.go new file mode 100644 index 0000000000..a2291362fe --- /dev/null +++ b/vendor/github.com/nicksnyder/go-i18n/i18n/bundle/bundle.go @@ -0,0 +1,444 @@ +// Package bundle manages translations for multiple languages. 
+package bundle + +import ( + "bytes" + "encoding/json" + "fmt" + "io/ioutil" + "path/filepath" + "reflect" + "sync" + "unicode" + + "github.com/nicksnyder/go-i18n/i18n/language" + "github.com/nicksnyder/go-i18n/i18n/translation" + toml "github.com/pelletier/go-toml" + "gopkg.in/yaml.v2" +) + +// TranslateFunc is a copy of i18n.TranslateFunc to avoid a circular dependency. +type TranslateFunc func(translationID string, args ...interface{}) string + +// Bundle stores the translations for multiple languages. +type Bundle struct { + // The primary translations for a language tag and translation id. + translations map[string]map[string]translation.Translation + + // Translations that can be used when an exact language match is not possible. + fallbackTranslations map[string]map[string]translation.Translation + + sync.RWMutex +} + +// New returns an empty bundle. +func New() *Bundle { + return &Bundle{ + translations: make(map[string]map[string]translation.Translation), + fallbackTranslations: make(map[string]map[string]translation.Translation), + } +} + +// MustLoadTranslationFile is similar to LoadTranslationFile +// except it panics if an error happens. +func (b *Bundle) MustLoadTranslationFile(filename string) { + if err := b.LoadTranslationFile(filename); err != nil { + panic(err) + } +} + +// LoadTranslationFile loads the translations from filename into memory. +// +// The language that the translations are associated with is parsed from the filename (e.g. en-US.json). +// +// Generally you should load translation files once during your program's initialization. +func (b *Bundle) LoadTranslationFile(filename string) error { + buf, err := ioutil.ReadFile(filename) + if err != nil { + return err + } + return b.ParseTranslationFileBytes(filename, buf) +} + +// ParseTranslationFileBytes is similar to LoadTranslationFile except it parses the bytes in buf. +// +// It is useful for parsing translation files embedded with go-bindata. +func (b *Bundle) ParseTranslationFileBytes(filename string, buf []byte) error { + basename := filepath.Base(filename) + langs := language.Parse(basename) + switch l := len(langs); { + case l == 0: + return fmt.Errorf("no language found in %q", basename) + case l > 1: + return fmt.Errorf("multiple languages found in filename %q: %v; expected one", basename, langs) + } + translations, err := parseTranslations(filename, buf) + if err != nil { + return err + } + b.AddTranslation(langs[0], translations...) + return nil +} + +func parseTranslations(filename string, buf []byte) ([]translation.Translation, error) { + if len(buf) == 0 { + return []translation.Translation{}, nil + } + + ext := filepath.Ext(filename) + + // `github.com/pelletier/go-toml` lacks an Unmarshal function, + // so we should parse TOML separately. + if ext == ".toml" { + tree, err := toml.LoadReader(bytes.NewReader(buf)) + if err != nil { + return nil, err + } + + m := make(map[string]map[string]interface{}) + for k, v := range tree.ToMap() { + m[k] = v.(map[string]interface{}) + } + + return parseFlatFormat(m) + } + + // Then parse other formats. 
+ if isStandardFormat(ext, buf) { + var standardFormat []map[string]interface{} + if err := unmarshal(ext, buf, &standardFormat); err != nil { + return nil, fmt.Errorf("failed to unmarshal %v: %v", filename, err) + } + return parseStandardFormat(standardFormat) + } else { + var flatFormat map[string]map[string]interface{} + if err := unmarshal(ext, buf, &flatFormat); err != nil { + return nil, fmt.Errorf("failed to unmarshal %v: %v", filename, err) + } + return parseFlatFormat(flatFormat) + } +} + +func isStandardFormat(ext string, buf []byte) bool { + buf = deleteLeadingComments(ext, buf) + firstRune := rune(buf[0]) + return (ext == ".json" && firstRune == '[') || (ext == ".yaml" && firstRune == '-') +} + +// deleteLeadingComments deletes leading newlines and comments in buf. +// It only works for ext == ".yaml". +func deleteLeadingComments(ext string, buf []byte) []byte { + if ext != ".yaml" { + return buf + } + + for { + buf = bytes.TrimLeftFunc(buf, unicode.IsSpace) + if buf[0] == '#' { + buf = deleteLine(buf) + } else { + break + } + } + + return buf +} + +func deleteLine(buf []byte) []byte { + index := bytes.IndexRune(buf, '\n') + if index == -1 { // If there is only one line without newline ... + return nil // ... delete it and return nothing. + } + if index == len(buf)-1 { // If there is only one line with newline ... + return nil // ... do the same as above. + } + return buf[index+1:] +} + +// unmarshal finds an appropriate unmarshal function for ext +// (extension of filename) and unmarshals buf to out. out must be a pointer. +func unmarshal(ext string, buf []byte, out interface{}) error { + switch ext { + case ".json": + return json.Unmarshal(buf, out) + case ".yaml": + return yaml.Unmarshal(buf, out) + } + + return fmt.Errorf("unsupported file extension %v", ext) +} + +func parseStandardFormat(data []map[string]interface{}) ([]translation.Translation, error) { + translations := make([]translation.Translation, 0, len(data)) + for i, translationData := range data { + t, err := translation.NewTranslation(translationData) + if err != nil { + return nil, fmt.Errorf("unable to parse translation #%d because %s\n%v", i, err, translationData) + } + translations = append(translations, t) + } + return translations, nil +} + +// parseFlatFormat just converts data from flat format to standard format +// and passes it to parseStandardFormat. +// +// Flat format logic: +// key of data must be a string and data[key] must be always map[string]interface{}, +// but if there is only "other" key in it then it is non-plural, else plural. +func parseFlatFormat(data map[string]map[string]interface{}) ([]translation.Translation, error) { + var standardFormatData []map[string]interface{} + for id, translationData := range data { + dataObject := make(map[string]interface{}) + dataObject["id"] = id + if len(translationData) == 1 { // non-plural form + _, otherExists := translationData["other"] + if otherExists { + dataObject["translation"] = translationData["other"] + } + } else { // plural form + dataObject["translation"] = translationData + } + + standardFormatData = append(standardFormatData, dataObject) + } + + return parseStandardFormat(standardFormatData) +} + +// AddTranslation adds translations for a language. +// +// It is useful if your translations are in a format not supported by LoadTranslationFile. 
+func (b *Bundle) AddTranslation(lang *language.Language, translations ...translation.Translation) {
+	b.Lock()
+	defer b.Unlock()
+	if b.translations[lang.Tag] == nil {
+		b.translations[lang.Tag] = make(map[string]translation.Translation, len(translations))
+	}
+	currentTranslations := b.translations[lang.Tag]
+	for _, newTranslation := range translations {
+		if currentTranslation := currentTranslations[newTranslation.ID()]; currentTranslation != nil {
+			currentTranslations[newTranslation.ID()] = currentTranslation.Merge(newTranslation)
+		} else {
+			currentTranslations[newTranslation.ID()] = newTranslation
+		}
+	}
+
+	// lang can provide translations for less specific language tags.
+	for _, tag := range lang.MatchingTags() {
+		b.fallbackTranslations[tag] = currentTranslations
+	}
+}
+
+// Translations returns all translations in the bundle.
+func (b *Bundle) Translations() map[string]map[string]translation.Translation {
+	t := make(map[string]map[string]translation.Translation)
+	b.RLock()
+	for tag, translations := range b.translations {
+		t[tag] = make(map[string]translation.Translation)
+		for id, translation := range translations {
+			t[tag][id] = translation
+		}
+	}
+	b.RUnlock()
+	return t
+}
+
+// LanguageTags returns the tags of all languages that have been added.
+func (b *Bundle) LanguageTags() []string {
+	var tags []string
+	b.RLock()
+	for k := range b.translations {
+		tags = append(tags, k)
+	}
+	b.RUnlock()
+	return tags
+}
+
+// LanguageTranslationIDs returns the ids of all translations that have been added for a given language.
+func (b *Bundle) LanguageTranslationIDs(languageTag string) []string {
+	var ids []string
+	b.RLock()
+	for id := range b.translations[languageTag] {
+		ids = append(ids, id)
+	}
+	b.RUnlock()
+	return ids
+}
+
+// MustTfunc is similar to Tfunc except it panics if an error happens.
+func (b *Bundle) MustTfunc(pref string, prefs ...string) TranslateFunc {
+	tfunc, err := b.Tfunc(pref, prefs...)
+	if err != nil {
+		panic(err)
+	}
+	return tfunc
+}
+
+// MustTfuncAndLanguage is similar to TfuncAndLanguage except it panics if an error happens.
+func (b *Bundle) MustTfuncAndLanguage(pref string, prefs ...string) (TranslateFunc, *language.Language) {
+	tfunc, language, err := b.TfuncAndLanguage(pref, prefs...)
+	if err != nil {
+		panic(err)
+	}
+	return tfunc, language
+}
+
+// Tfunc is similar to TfuncAndLanguage except it doesn't return the Language.
+func (b *Bundle) Tfunc(pref string, prefs ...string) (TranslateFunc, error) {
+	tfunc, _, err := b.TfuncAndLanguage(pref, prefs...)
+	return tfunc, err
+}
+
+// TfuncAndLanguage returns a TranslateFunc for the first Language that
+// has a non-zero number of translations in the bundle.
+//
+// The returned Language matches the first language preference that could be satisfied,
+// but this may not strictly match the language of the translations used to satisfy that preference.
+//
+// For example, the user may request "zh". If there are no translations for "zh" but there are translations
+// for "zh-cn", then the translations for "zh-cn" will be used but the returned Language will be "zh".
+//
+// It can parse languages from Accept-Language headers (RFC 2616),
+// but it assumes weights are monotonically decreasing.
+func (b *Bundle) TfuncAndLanguage(pref string, prefs ...string) (TranslateFunc, *language.Language, error) {
+	lang := b.supportedLanguage(pref, prefs...)
+ var err error + if lang == nil { + err = fmt.Errorf("no supported languages found %#v", append(prefs, pref)) + } + return func(translationID string, args ...interface{}) string { + return b.translate(lang, translationID, args...) + }, lang, err +} + +// supportedLanguage returns the first language which +// has a non-zero number of translations in the bundle. +func (b *Bundle) supportedLanguage(pref string, prefs ...string) *language.Language { + lang := b.translatedLanguage(pref) + if lang == nil { + for _, pref := range prefs { + lang = b.translatedLanguage(pref) + if lang != nil { + break + } + } + } + return lang +} + +func (b *Bundle) translatedLanguage(src string) *language.Language { + langs := language.Parse(src) + b.RLock() + defer b.RUnlock() + for _, lang := range langs { + if len(b.translations[lang.Tag]) > 0 || + len(b.fallbackTranslations[lang.Tag]) > 0 { + return lang + } + } + return nil +} + +func (b *Bundle) translate(lang *language.Language, translationID string, args ...interface{}) string { + if lang == nil { + return translationID + } + + translation := b.translation(lang, translationID) + if translation == nil { + return translationID + } + + var data interface{} + var count interface{} + if argc := len(args); argc > 0 { + if isNumber(args[0]) { + count = args[0] + if argc > 1 { + data = args[1] + } + } else { + data = args[0] + } + } + + if count != nil { + if data == nil { + data = map[string]interface{}{"Count": count} + } else { + dataMap := toMap(data) + dataMap["Count"] = count + data = dataMap + } + } else { + dataMap := toMap(data) + if c, ok := dataMap["Count"]; ok { + count = c + } + } + + p, _ := lang.Plural(count) + template := translation.Template(p) + if template == nil { + return translationID + } + + s := template.Execute(data) + if s == "" { + return translationID + } + return s +} + +func (b *Bundle) translation(lang *language.Language, translationID string) translation.Translation { + b.RLock() + defer b.RUnlock() + translations := b.translations[lang.Tag] + if translations == nil { + translations = b.fallbackTranslations[lang.Tag] + if translations == nil { + return nil + } + } + return translations[translationID] +} + +func isNumber(n interface{}) bool { + switch n.(type) { + case int, int8, int16, int32, int64, string: + return true + } + return false +} + +func toMap(input interface{}) map[string]interface{} { + if data, ok := input.(map[string]interface{}); ok { + return data + } + v := reflect.ValueOf(input) + switch v.Kind() { + case reflect.Ptr: + return toMap(v.Elem().Interface()) + case reflect.Struct: + return structToMap(v) + default: + return nil + } +} + +// Converts the top level of a struct to a map[string]interface{}. +// Code inspired by github.com/fatih/structs. +func structToMap(v reflect.Value) map[string]interface{} { + out := make(map[string]interface{}) + t := v.Type() + for i := 0; i < t.NumField(); i++ { + field := t.Field(i) + if field.PkgPath != "" { + // unexported field. skip. + continue + } + out[field.Name] = v.FieldByName(field.Name).Interface() + } + return out +} diff --git a/vendor/github.com/nicksnyder/go-i18n/i18n/i18n.go b/vendor/github.com/nicksnyder/go-i18n/i18n/i18n.go new file mode 100644 index 0000000000..c478ff6ea1 --- /dev/null +++ b/vendor/github.com/nicksnyder/go-i18n/i18n/i18n.go @@ -0,0 +1,158 @@ +// Package i18n supports string translations with variable substitution and CLDR pluralization. 
+// It is intended to be used in conjunction with the goi18n command, although that is not strictly required. +// +// Initialization +// +// Your Go program should load translations during its initialization. +// i18n.MustLoadTranslationFile("path/to/fr-FR.all.json") +// If your translations are in a file format not supported by (Must)?LoadTranslationFile, +// then you can use the AddTranslation function to manually add translations. +// +// Fetching a translation +// +// Use Tfunc or MustTfunc to fetch a TranslateFunc that will return the translated string for a specific language. +// func handleRequest(w http.ResponseWriter, r *http.Request) { +// cookieLang := r.Cookie("lang") +// acceptLang := r.Header.Get("Accept-Language") +// defaultLang = "en-US" // known valid language +// T, err := i18n.Tfunc(cookieLang, acceptLang, defaultLang) +// fmt.Println(T("Hello world")) +// } +// +// Usually it is a good idea to identify strings by a generic id rather than the English translation, +// but the rest of this documentation will continue to use the English translation for readability. +// T("Hello world") // ok +// T("programGreeting") // better! +// +// Variables +// +// TranslateFunc supports strings that have variables using the text/template syntax. +// T("Hello {{.Person}}", map[string]interface{}{ +// "Person": "Bob", +// }) +// +// Pluralization +// +// TranslateFunc supports the pluralization of strings using the CLDR pluralization rules defined here: +// http://www.unicode.org/cldr/charts/latest/supplemental/language_plural_rules.html +// T("You have {{.Count}} unread emails.", 2) +// T("I am {{.Count}} meters tall.", "1.7") +// +// Plural strings may also have variables. +// T("{{.Person}} has {{.Count}} unread emails", 2, map[string]interface{}{ +// "Person": "Bob", +// }) +// +// Sentences with multiple plural components can be supported with nesting. +// T("{{.Person}} has {{.Count}} unread emails in the past {{.Timeframe}}.", 3, map[string]interface{}{ +// "Person": "Bob", +// "Timeframe": T("{{.Count}} days", 2), +// }) +// +// Templates +// +// You can use the .Funcs() method of a text/template or html/template to register a TranslateFunc +// for usage inside of that template. +package i18n + +import ( + "github.com/nicksnyder/go-i18n/i18n/bundle" + "github.com/nicksnyder/go-i18n/i18n/language" + "github.com/nicksnyder/go-i18n/i18n/translation" +) + +// TranslateFunc returns the translation of the string identified by translationID. +// +// If there is no translation for translationID, then the translationID itself is returned. +// This makes it easy to identify missing translations in your app. +// +// If translationID is a non-plural form, then the first variadic argument may be a map[string]interface{} +// or struct that contains template data. +// +// If translationID is a plural form, the function accepts two parameter signatures +// 1. T(count int, data struct{}) +// The first variadic argument must be an integer type +// (int, int8, int16, int32, int64) or a float formatted as a string (e.g. "123.45"). +// The second variadic argument may be a map[string]interface{} or struct{} that contains template data. +// 2. T(data struct{}) +// data must be a struct{} or map[string]interface{} that contains a Count field and the template data, +// Count field must be an integer type (int, int8, int16, int32, int64) +// or a float formatted as a string (e.g. "123.45"). 
+type TranslateFunc func(translationID string, args ...interface{}) string + +// IdentityTfunc returns a TranslateFunc that always returns the translationID passed to it. +// +// It is a useful placeholder when parsing a text/template or html/template +// before the actual Tfunc is available. +func IdentityTfunc() TranslateFunc { + return func(translationID string, args ...interface{}) string { + return translationID + } +} + +var defaultBundle = bundle.New() + +// MustLoadTranslationFile is similar to LoadTranslationFile +// except it panics if an error happens. +func MustLoadTranslationFile(filename string) { + defaultBundle.MustLoadTranslationFile(filename) +} + +// LoadTranslationFile loads the translations from filename into memory. +// +// The language that the translations are associated with is parsed from the filename (e.g. en-US.json). +// +// Generally you should load translation files once during your program's initialization. +func LoadTranslationFile(filename string) error { + return defaultBundle.LoadTranslationFile(filename) +} + +// ParseTranslationFileBytes is similar to LoadTranslationFile except it parses the bytes in buf. +// +// It is useful for parsing translation files embedded with go-bindata. +func ParseTranslationFileBytes(filename string, buf []byte) error { + return defaultBundle.ParseTranslationFileBytes(filename, buf) +} + +// AddTranslation adds translations for a language. +// +// It is useful if your translations are in a format not supported by LoadTranslationFile. +func AddTranslation(lang *language.Language, translations ...translation.Translation) { + defaultBundle.AddTranslation(lang, translations...) +} + +// LanguageTags returns the tags of all languages that have been added. +func LanguageTags() []string { + return defaultBundle.LanguageTags() +} + +// LanguageTranslationIDs returns the ids of all translations that have been added for a given language. +func LanguageTranslationIDs(languageTag string) []string { + return defaultBundle.LanguageTranslationIDs(languageTag) +} + +// MustTfunc is similar to Tfunc except it panics if an error happens. +func MustTfunc(languageSource string, languageSources ...string) TranslateFunc { + return TranslateFunc(defaultBundle.MustTfunc(languageSource, languageSources...)) +} + +// Tfunc returns a TranslateFunc that will be bound to the first language which +// has a non-zero number of translations. +// +// It can parse languages from Accept-Language headers (RFC 2616). +func Tfunc(languageSource string, languageSources ...string) (TranslateFunc, error) { + tfunc, err := defaultBundle.Tfunc(languageSource, languageSources...) + return TranslateFunc(tfunc), err +} + +// MustTfuncAndLanguage is similar to TfuncAndLanguage except it panics if an error happens. +func MustTfuncAndLanguage(languageSource string, languageSources ...string) (TranslateFunc, *language.Language) { + tfunc, lang := defaultBundle.MustTfuncAndLanguage(languageSource, languageSources...) + return TranslateFunc(tfunc), lang +} + +// TfuncAndLanguage is similar to Tfunc except it also returns the language which TranslateFunc is bound to. +func TfuncAndLanguage(languageSource string, languageSources ...string) (TranslateFunc, *language.Language, error) { + tfunc, lang, err := defaultBundle.TfuncAndLanguage(languageSource, languageSources...) 
+	return TranslateFunc(tfunc), lang, err
+}
diff --git a/vendor/github.com/nicksnyder/go-i18n/i18n/language/language.go b/vendor/github.com/nicksnyder/go-i18n/i18n/language/language.go
new file mode 100644
index 0000000000..b045a27519
--- /dev/null
+++ b/vendor/github.com/nicksnyder/go-i18n/i18n/language/language.go
@@ -0,0 +1,99 @@
+// Package language defines languages that implement CLDR pluralization.
+package language
+
+import (
+	"fmt"
+	"strings"
+)
+
+// Language is a written human language.
+type Language struct {
+	// Tag uniquely identifies the language as defined by RFC 5646.
+	//
+	// Most language tags are a two character language code (ISO 639-1)
+	// optionally followed by a dash and a two character country code (ISO 3166-1).
+	// (e.g. en, pt-br)
+	Tag string
+	*PluralSpec
+}
+
+func (l *Language) String() string {
+	return l.Tag
+}
+
+// MatchingTags returns the set of language tags that map to this Language.
+// e.g. "zh-hans-cn" yields {"zh", "zh-hans", "zh-hans-cn"}
+// BUG: This should be computed once and stored as a field on Language for efficiency,
+// but this would require changing how Languages are constructed.
+func (l *Language) MatchingTags() []string {
+	parts := strings.Split(l.Tag, "-")
+	var prefix, matches []string
+	for _, part := range parts {
+		prefix = append(prefix, part)
+		match := strings.Join(prefix, "-")
+		matches = append(matches, match)
+	}
+	return matches
+}
+
+// Parse returns a slice of supported languages found in src or nil if none are found.
+// It can parse language tags and Accept-Language headers.
+func Parse(src string) []*Language {
+	var langs []*Language
+	start := 0
+	for end, chr := range src {
+		switch chr {
+		case ',', ';', '.':
+			tag := strings.TrimSpace(src[start:end])
+			if spec := GetPluralSpec(tag); spec != nil {
+				langs = append(langs, &Language{NormalizeTag(tag), spec})
+			}
+			start = end + 1
+		}
+	}
+	if start > 0 {
+		tag := strings.TrimSpace(src[start:])
+		if spec := GetPluralSpec(tag); spec != nil {
+			langs = append(langs, &Language{NormalizeTag(tag), spec})
+		}
+		return dedupe(langs)
+	}
+	if spec := GetPluralSpec(src); spec != nil {
+		langs = append(langs, &Language{NormalizeTag(src), spec})
+	}
+	return langs
+}
+
+func dedupe(langs []*Language) []*Language {
+	found := make(map[string]struct{}, len(langs))
+	deduped := make([]*Language, 0, len(langs))
+	for _, lang := range langs {
+		if _, ok := found[lang.Tag]; !ok {
+			found[lang.Tag] = struct{}{}
+			deduped = append(deduped, lang)
+		}
+	}
+	return deduped
+}
+
+// MustParse is similar to Parse except it panics instead of returning a nil Language.
+func MustParse(src string) []*Language {
+	langs := Parse(src)
+	if len(langs) == 0 {
+		panic(fmt.Errorf("unable to parse language from %q", src))
+	}
+	return langs
+}
+
+// Add adds support for a new language.
+func Add(l *Language) { + tag := NormalizeTag(l.Tag) + pluralSpecs[tag] = l.PluralSpec +} + +// NormalizeTag returns a language tag with all lower-case characters +// and dashes "-" instead of underscores "_" +func NormalizeTag(tag string) string { + tag = strings.ToLower(tag) + return strings.Replace(tag, "_", "-", -1) +} diff --git a/vendor/github.com/nicksnyder/go-i18n/i18n/language/operands.go b/vendor/github.com/nicksnyder/go-i18n/i18n/language/operands.go new file mode 100644 index 0000000000..49ee7dc7c1 --- /dev/null +++ b/vendor/github.com/nicksnyder/go-i18n/i18n/language/operands.go @@ -0,0 +1,119 @@ +package language + +import ( + "fmt" + "strconv" + "strings" +) + +// http://unicode.org/reports/tr35/tr35-numbers.html#Operands +type Operands struct { + N float64 // absolute value of the source number (integer and decimals) + I int64 // integer digits of n + V int64 // number of visible fraction digits in n, with trailing zeros + W int64 // number of visible fraction digits in n, without trailing zeros + F int64 // visible fractional digits in n, with trailing zeros + T int64 // visible fractional digits in n, without trailing zeros +} + +// NmodEqualAny returns true if o represents an integer equal to any of the arguments. +func (o *Operands) NequalsAny(any ...int64) bool { + for _, i := range any { + if o.I == i && o.T == 0 { + return true + } + } + return false +} + +// NmodEqualAny returns true if o represents an integer equal to any of the arguments modulo mod. +func (o *Operands) NmodEqualsAny(mod int64, any ...int64) bool { + modI := o.I % mod + for _, i := range any { + if modI == i && o.T == 0 { + return true + } + } + return false +} + +// NmodInRange returns true if o represents an integer in the closed interval [from, to]. +func (o *Operands) NinRange(from, to int64) bool { + return o.T == 0 && from <= o.I && o.I <= to +} + +// NmodInRange returns true if o represents an integer in the closed interval [from, to] modulo mod. 
+func (o *Operands) NmodInRange(mod, from, to int64) bool { + modI := o.I % mod + return o.T == 0 && from <= modI && modI <= to +} + +func newOperands(v interface{}) (*Operands, error) { + switch v := v.(type) { + case int: + return newOperandsInt64(int64(v)), nil + case int8: + return newOperandsInt64(int64(v)), nil + case int16: + return newOperandsInt64(int64(v)), nil + case int32: + return newOperandsInt64(int64(v)), nil + case int64: + return newOperandsInt64(v), nil + case string: + return newOperandsString(v) + case float32, float64: + return nil, fmt.Errorf("floats should be formatted into a string") + default: + return nil, fmt.Errorf("invalid type %T; expected integer or string", v) + } +} + +func newOperandsInt64(i int64) *Operands { + if i < 0 { + i = -i + } + return &Operands{float64(i), i, 0, 0, 0, 0} +} + +func newOperandsString(s string) (*Operands, error) { + if s[0] == '-' { + s = s[1:] + } + n, err := strconv.ParseFloat(s, 64) + if err != nil { + return nil, err + } + ops := &Operands{N: n} + parts := strings.SplitN(s, ".", 2) + ops.I, err = strconv.ParseInt(parts[0], 10, 64) + if err != nil { + return nil, err + } + if len(parts) == 1 { + return ops, nil + } + fraction := parts[1] + ops.V = int64(len(fraction)) + for i := ops.V - 1; i >= 0; i-- { + if fraction[i] != '0' { + ops.W = i + 1 + break + } + } + if ops.V > 0 { + f, err := strconv.ParseInt(fraction, 10, 0) + if err != nil { + return nil, err + } + ops.F = f + } + if ops.W > 0 { + t, err := strconv.ParseInt(fraction[:ops.W], 10, 0) + if err != nil { + return nil, err + } + ops.T = t + } + return ops, nil +} diff --git a/vendor/github.com/nicksnyder/go-i18n/i18n/language/plural.go b/vendor/github.com/nicksnyder/go-i18n/i18n/language/plural.go new file mode 100644 index 0000000000..1f3ea5c69b --- /dev/null +++ b/vendor/github.com/nicksnyder/go-i18n/i18n/language/plural.go @@ -0,0 +1,40 @@ +package language + +import ( + "fmt" +) + +// Plural represents a language pluralization form as defined here: +// http://cldr.unicode.org/index/cldr-spec/plural-rules +type Plural string + +// All defined plural categories. +const ( + Invalid Plural = "invalid" + Zero = "zero" + One = "one" + Two = "two" + Few = "few" + Many = "many" + Other = "other" +) + +// NewPlural returns src as a Plural +// or Invalid and a non-nil error if src is not a valid Plural. +func NewPlural(src string) (Plural, error) { + switch src { + case "zero": + return Zero, nil + case "one": + return One, nil + case "two": + return Two, nil + case "few": + return Few, nil + case "many": + return Many, nil + case "other": + return Other, nil + } + return Invalid, fmt.Errorf("invalid plural category %s", src) +} diff --git a/vendor/github.com/nicksnyder/go-i18n/i18n/language/pluralspec.go b/vendor/github.com/nicksnyder/go-i18n/i18n/language/pluralspec.go new file mode 100644 index 0000000000..fc31e8807d --- /dev/null +++ b/vendor/github.com/nicksnyder/go-i18n/i18n/language/pluralspec.go @@ -0,0 +1,75 @@ +package language + +import "strings" + +// PluralSpec defines the CLDR plural rules for a language. 
+// http://www.unicode.org/cldr/charts/latest/supplemental/language_plural_rules.html +// http://unicode.org/reports/tr35/tr35-numbers.html#Operands +type PluralSpec struct { + Plurals map[Plural]struct{} + PluralFunc func(*Operands) Plural +} + +var pluralSpecs = make(map[string]*PluralSpec) + +func normalizePluralSpecID(id string) string { + id = strings.Replace(id, "_", "-", -1) + id = strings.ToLower(id) + return id +} + +// RegisterPluralSpec registers a new plural spec for the language ids. +func RegisterPluralSpec(ids []string, ps *PluralSpec) { + for _, id := range ids { + id = normalizePluralSpecID(id) + pluralSpecs[id] = ps + } +} + +// Plural returns the plural category for number as defined by +// the language's CLDR plural rules. +func (ps *PluralSpec) Plural(number interface{}) (Plural, error) { + ops, err := newOperands(number) + if err != nil { + return Invalid, err + } + return ps.PluralFunc(ops), nil +} + +// GetPluralSpec returns the PluralSpec that matches the longest prefix of tag. +// It returns nil if no PluralSpec matches tag. +func GetPluralSpec(tag string) *PluralSpec { + tag = NormalizeTag(tag) + subtag := tag + for { + if spec := pluralSpecs[subtag]; spec != nil { + return spec + } + end := strings.LastIndex(subtag, "-") + if end == -1 { + return nil + } + subtag = subtag[:end] + } +} + +func newPluralSet(plurals ...Plural) map[Plural]struct{} { + set := make(map[Plural]struct{}, len(plurals)) + for _, plural := range plurals { + set[plural] = struct{}{} + } + return set +} + +func intInRange(i, from, to int64) bool { + return from <= i && i <= to +} + +func intEqualsAny(i int64, any ...int64) bool { + for _, a := range any { + if i == a { + return true + } + } + return false +} diff --git a/vendor/github.com/nicksnyder/go-i18n/i18n/language/pluralspec_gen.go b/vendor/github.com/nicksnyder/go-i18n/i18n/language/pluralspec_gen.go new file mode 100644 index 0000000000..0268bb92c0 --- /dev/null +++ b/vendor/github.com/nicksnyder/go-i18n/i18n/language/pluralspec_gen.go @@ -0,0 +1,557 @@ +package language + +// This file is generated by i18n/language/codegen/generate.sh + +func init() { + + RegisterPluralSpec([]string{"bm", "bo", "dz", "id", "ig", "ii", "in", "ja", "jbo", "jv", "jw", "kde", "kea", "km", "ko", "lkt", "lo", "ms", "my", "nqo", "root", "sah", "ses", "sg", "th", "to", "vi", "wo", "yo", "yue", "zh"}, &PluralSpec{ + Plurals: newPluralSet(Other), + PluralFunc: func(ops *Operands) Plural { + return Other + }, + }) + RegisterPluralSpec([]string{"am", "as", "bn", "fa", "gu", "hi", "kn", "mr", "zu"}, &PluralSpec{ + Plurals: newPluralSet(One, Other), + PluralFunc: func(ops *Operands) Plural { + // i = 0 or n = 1 + if intEqualsAny(ops.I, 0) || + ops.NequalsAny(1) { + return One + } + return Other + }, + }) + RegisterPluralSpec([]string{"ff", "fr", "hy", "kab"}, &PluralSpec{ + Plurals: newPluralSet(One, Other), + PluralFunc: func(ops *Operands) Plural { + // i = 0,1 + if intEqualsAny(ops.I, 0, 1) { + return One + } + return Other + }, + }) + RegisterPluralSpec([]string{"pt"}, &PluralSpec{ + Plurals: newPluralSet(One, Other), + PluralFunc: func(ops *Operands) Plural { + // i = 0..1 + if intInRange(ops.I, 0, 1) { + return One + } + return Other + }, + }) + RegisterPluralSpec([]string{"ast", "ca", "de", "en", "et", "fi", "fy", "gl", "it", "ji", "nl", "sv", "sw", "ur", "yi"}, &PluralSpec{ + Plurals: newPluralSet(One, Other), + PluralFunc: func(ops *Operands) Plural { + // i = 1 and v = 0 + if intEqualsAny(ops.I, 1) && intEqualsAny(ops.V, 0) { + return One + } 
+ return Other + }, + }) + RegisterPluralSpec([]string{"si"}, &PluralSpec{ + Plurals: newPluralSet(One, Other), + PluralFunc: func(ops *Operands) Plural { + // n = 0,1 or i = 0 and f = 1 + if ops.NequalsAny(0, 1) || + intEqualsAny(ops.I, 0) && intEqualsAny(ops.F, 1) { + return One + } + return Other + }, + }) + RegisterPluralSpec([]string{"ak", "bh", "guw", "ln", "mg", "nso", "pa", "ti", "wa"}, &PluralSpec{ + Plurals: newPluralSet(One, Other), + PluralFunc: func(ops *Operands) Plural { + // n = 0..1 + if ops.NinRange(0, 1) { + return One + } + return Other + }, + }) + RegisterPluralSpec([]string{"tzm"}, &PluralSpec{ + Plurals: newPluralSet(One, Other), + PluralFunc: func(ops *Operands) Plural { + // n = 0..1 or n = 11..99 + if ops.NinRange(0, 1) || + ops.NinRange(11, 99) { + return One + } + return Other + }, + }) + RegisterPluralSpec([]string{"af", "asa", "az", "bem", "bez", "bg", "brx", "ce", "cgg", "chr", "ckb", "dv", "ee", "el", "eo", "es", "eu", "fo", "fur", "gsw", "ha", "haw", "hu", "jgo", "jmc", "ka", "kaj", "kcg", "kk", "kkj", "kl", "ks", "ksb", "ku", "ky", "lb", "lg", "mas", "mgo", "ml", "mn", "nah", "nb", "nd", "ne", "nn", "nnh", "no", "nr", "ny", "nyn", "om", "or", "os", "pap", "ps", "rm", "rof", "rwk", "saq", "sdh", "seh", "sn", "so", "sq", "ss", "ssy", "st", "syr", "ta", "te", "teo", "tig", "tk", "tn", "tr", "ts", "ug", "uz", "ve", "vo", "vun", "wae", "xh", "xog"}, &PluralSpec{ + Plurals: newPluralSet(One, Other), + PluralFunc: func(ops *Operands) Plural { + // n = 1 + if ops.NequalsAny(1) { + return One + } + return Other + }, + }) + RegisterPluralSpec([]string{"da"}, &PluralSpec{ + Plurals: newPluralSet(One, Other), + PluralFunc: func(ops *Operands) Plural { + // n = 1 or t != 0 and i = 0,1 + if ops.NequalsAny(1) || + !intEqualsAny(ops.T, 0) && intEqualsAny(ops.I, 0, 1) { + return One + } + return Other + }, + }) + RegisterPluralSpec([]string{"is"}, &PluralSpec{ + Plurals: newPluralSet(One, Other), + PluralFunc: func(ops *Operands) Plural { + // t = 0 and i % 10 = 1 and i % 100 != 11 or t != 0 + if intEqualsAny(ops.T, 0) && intEqualsAny(ops.I%10, 1) && !intEqualsAny(ops.I%100, 11) || + !intEqualsAny(ops.T, 0) { + return One + } + return Other + }, + }) + RegisterPluralSpec([]string{"mk"}, &PluralSpec{ + Plurals: newPluralSet(One, Other), + PluralFunc: func(ops *Operands) Plural { + // v = 0 and i % 10 = 1 or f % 10 = 1 + if intEqualsAny(ops.V, 0) && intEqualsAny(ops.I%10, 1) || + intEqualsAny(ops.F%10, 1) { + return One + } + return Other + }, + }) + RegisterPluralSpec([]string{"fil", "tl"}, &PluralSpec{ + Plurals: newPluralSet(One, Other), + PluralFunc: func(ops *Operands) Plural { + // v = 0 and i = 1,2,3 or v = 0 and i % 10 != 4,6,9 or v != 0 and f % 10 != 4,6,9 + if intEqualsAny(ops.V, 0) && intEqualsAny(ops.I, 1, 2, 3) || + intEqualsAny(ops.V, 0) && !intEqualsAny(ops.I%10, 4, 6, 9) || + !intEqualsAny(ops.V, 0) && !intEqualsAny(ops.F%10, 4, 6, 9) { + return One + } + return Other + }, + }) + RegisterPluralSpec([]string{"lv", "prg"}, &PluralSpec{ + Plurals: newPluralSet(Zero, One, Other), + PluralFunc: func(ops *Operands) Plural { + // n % 10 = 0 or n % 100 = 11..19 or v = 2 and f % 100 = 11..19 + if ops.NmodEqualsAny(10, 0) || + ops.NmodInRange(100, 11, 19) || + intEqualsAny(ops.V, 2) && intInRange(ops.F%100, 11, 19) { + return Zero + } + // n % 10 = 1 and n % 100 != 11 or v = 2 and f % 10 = 1 and f % 100 != 11 or v != 2 and f % 10 = 1 + if ops.NmodEqualsAny(10, 1) && !ops.NmodEqualsAny(100, 11) || + intEqualsAny(ops.V, 2) && intEqualsAny(ops.F%10, 1) && 
!intEqualsAny(ops.F%100, 11) || + !intEqualsAny(ops.V, 2) && intEqualsAny(ops.F%10, 1) { + return One + } + return Other + }, + }) + RegisterPluralSpec([]string{"lag"}, &PluralSpec{ + Plurals: newPluralSet(Zero, One, Other), + PluralFunc: func(ops *Operands) Plural { + // n = 0 + if ops.NequalsAny(0) { + return Zero + } + // i = 0,1 and n != 0 + if intEqualsAny(ops.I, 0, 1) && !ops.NequalsAny(0) { + return One + } + return Other + }, + }) + RegisterPluralSpec([]string{"ksh"}, &PluralSpec{ + Plurals: newPluralSet(Zero, One, Other), + PluralFunc: func(ops *Operands) Plural { + // n = 0 + if ops.NequalsAny(0) { + return Zero + } + // n = 1 + if ops.NequalsAny(1) { + return One + } + return Other + }, + }) + RegisterPluralSpec([]string{"iu", "kw", "naq", "se", "sma", "smi", "smj", "smn", "sms"}, &PluralSpec{ + Plurals: newPluralSet(One, Two, Other), + PluralFunc: func(ops *Operands) Plural { + // n = 1 + if ops.NequalsAny(1) { + return One + } + // n = 2 + if ops.NequalsAny(2) { + return Two + } + return Other + }, + }) + RegisterPluralSpec([]string{"shi"}, &PluralSpec{ + Plurals: newPluralSet(One, Few, Other), + PluralFunc: func(ops *Operands) Plural { + // i = 0 or n = 1 + if intEqualsAny(ops.I, 0) || + ops.NequalsAny(1) { + return One + } + // n = 2..10 + if ops.NinRange(2, 10) { + return Few + } + return Other + }, + }) + RegisterPluralSpec([]string{"mo", "ro"}, &PluralSpec{ + Plurals: newPluralSet(One, Few, Other), + PluralFunc: func(ops *Operands) Plural { + // i = 1 and v = 0 + if intEqualsAny(ops.I, 1) && intEqualsAny(ops.V, 0) { + return One + } + // v != 0 or n = 0 or n != 1 and n % 100 = 1..19 + if !intEqualsAny(ops.V, 0) || + ops.NequalsAny(0) || + !ops.NequalsAny(1) && ops.NmodInRange(100, 1, 19) { + return Few + } + return Other + }, + }) + RegisterPluralSpec([]string{"bs", "hr", "sh", "sr"}, &PluralSpec{ + Plurals: newPluralSet(One, Few, Other), + PluralFunc: func(ops *Operands) Plural { + // v = 0 and i % 10 = 1 and i % 100 != 11 or f % 10 = 1 and f % 100 != 11 + if intEqualsAny(ops.V, 0) && intEqualsAny(ops.I%10, 1) && !intEqualsAny(ops.I%100, 11) || + intEqualsAny(ops.F%10, 1) && !intEqualsAny(ops.F%100, 11) { + return One + } + // v = 0 and i % 10 = 2..4 and i % 100 != 12..14 or f % 10 = 2..4 and f % 100 != 12..14 + if intEqualsAny(ops.V, 0) && intInRange(ops.I%10, 2, 4) && !intInRange(ops.I%100, 12, 14) || + intInRange(ops.F%10, 2, 4) && !intInRange(ops.F%100, 12, 14) { + return Few + } + return Other + }, + }) + RegisterPluralSpec([]string{"gd"}, &PluralSpec{ + Plurals: newPluralSet(One, Two, Few, Other), + PluralFunc: func(ops *Operands) Plural { + // n = 1,11 + if ops.NequalsAny(1, 11) { + return One + } + // n = 2,12 + if ops.NequalsAny(2, 12) { + return Two + } + // n = 3..10,13..19 + if ops.NinRange(3, 10) || ops.NinRange(13, 19) { + return Few + } + return Other + }, + }) + RegisterPluralSpec([]string{"sl"}, &PluralSpec{ + Plurals: newPluralSet(One, Two, Few, Other), + PluralFunc: func(ops *Operands) Plural { + // v = 0 and i % 100 = 1 + if intEqualsAny(ops.V, 0) && intEqualsAny(ops.I%100, 1) { + return One + } + // v = 0 and i % 100 = 2 + if intEqualsAny(ops.V, 0) && intEqualsAny(ops.I%100, 2) { + return Two + } + // v = 0 and i % 100 = 3..4 or v != 0 + if intEqualsAny(ops.V, 0) && intInRange(ops.I%100, 3, 4) || + !intEqualsAny(ops.V, 0) { + return Few + } + return Other + }, + }) + RegisterPluralSpec([]string{"dsb", "hsb"}, &PluralSpec{ + Plurals: newPluralSet(One, Two, Few, Other), + PluralFunc: func(ops *Operands) Plural { + // v = 0 and i % 100 = 1 or f % 100 = 
1 + if intEqualsAny(ops.V, 0) && intEqualsAny(ops.I%100, 1) || + intEqualsAny(ops.F%100, 1) { + return One + } + // v = 0 and i % 100 = 2 or f % 100 = 2 + if intEqualsAny(ops.V, 0) && intEqualsAny(ops.I%100, 2) || + intEqualsAny(ops.F%100, 2) { + return Two + } + // v = 0 and i % 100 = 3..4 or f % 100 = 3..4 + if intEqualsAny(ops.V, 0) && intInRange(ops.I%100, 3, 4) || + intInRange(ops.F%100, 3, 4) { + return Few + } + return Other + }, + }) + RegisterPluralSpec([]string{"he", "iw"}, &PluralSpec{ + Plurals: newPluralSet(One, Two, Many, Other), + PluralFunc: func(ops *Operands) Plural { + // i = 1 and v = 0 + if intEqualsAny(ops.I, 1) && intEqualsAny(ops.V, 0) { + return One + } + // i = 2 and v = 0 + if intEqualsAny(ops.I, 2) && intEqualsAny(ops.V, 0) { + return Two + } + // v = 0 and n != 0..10 and n % 10 = 0 + if intEqualsAny(ops.V, 0) && !ops.NinRange(0, 10) && ops.NmodEqualsAny(10, 0) { + return Many + } + return Other + }, + }) + RegisterPluralSpec([]string{"cs", "sk"}, &PluralSpec{ + Plurals: newPluralSet(One, Few, Many, Other), + PluralFunc: func(ops *Operands) Plural { + // i = 1 and v = 0 + if intEqualsAny(ops.I, 1) && intEqualsAny(ops.V, 0) { + return One + } + // i = 2..4 and v = 0 + if intInRange(ops.I, 2, 4) && intEqualsAny(ops.V, 0) { + return Few + } + // v != 0 + if !intEqualsAny(ops.V, 0) { + return Many + } + return Other + }, + }) + RegisterPluralSpec([]string{"pl"}, &PluralSpec{ + Plurals: newPluralSet(One, Few, Many, Other), + PluralFunc: func(ops *Operands) Plural { + // i = 1 and v = 0 + if intEqualsAny(ops.I, 1) && intEqualsAny(ops.V, 0) { + return One + } + // v = 0 and i % 10 = 2..4 and i % 100 != 12..14 + if intEqualsAny(ops.V, 0) && intInRange(ops.I%10, 2, 4) && !intInRange(ops.I%100, 12, 14) { + return Few + } + // v = 0 and i != 1 and i % 10 = 0..1 or v = 0 and i % 10 = 5..9 or v = 0 and i % 100 = 12..14 + if intEqualsAny(ops.V, 0) && !intEqualsAny(ops.I, 1) && intInRange(ops.I%10, 0, 1) || + intEqualsAny(ops.V, 0) && intInRange(ops.I%10, 5, 9) || + intEqualsAny(ops.V, 0) && intInRange(ops.I%100, 12, 14) { + return Many + } + return Other + }, + }) + RegisterPluralSpec([]string{"be"}, &PluralSpec{ + Plurals: newPluralSet(One, Few, Many, Other), + PluralFunc: func(ops *Operands) Plural { + // n % 10 = 1 and n % 100 != 11 + if ops.NmodEqualsAny(10, 1) && !ops.NmodEqualsAny(100, 11) { + return One + } + // n % 10 = 2..4 and n % 100 != 12..14 + if ops.NmodInRange(10, 2, 4) && !ops.NmodInRange(100, 12, 14) { + return Few + } + // n % 10 = 0 or n % 10 = 5..9 or n % 100 = 11..14 + if ops.NmodEqualsAny(10, 0) || + ops.NmodInRange(10, 5, 9) || + ops.NmodInRange(100, 11, 14) { + return Many + } + return Other + }, + }) + RegisterPluralSpec([]string{"lt"}, &PluralSpec{ + Plurals: newPluralSet(One, Few, Many, Other), + PluralFunc: func(ops *Operands) Plural { + // n % 10 = 1 and n % 100 != 11..19 + if ops.NmodEqualsAny(10, 1) && !ops.NmodInRange(100, 11, 19) { + return One + } + // n % 10 = 2..9 and n % 100 != 11..19 + if ops.NmodInRange(10, 2, 9) && !ops.NmodInRange(100, 11, 19) { + return Few + } + // f != 0 + if !intEqualsAny(ops.F, 0) { + return Many + } + return Other + }, + }) + RegisterPluralSpec([]string{"mt"}, &PluralSpec{ + Plurals: newPluralSet(One, Few, Many, Other), + PluralFunc: func(ops *Operands) Plural { + // n = 1 + if ops.NequalsAny(1) { + return One + } + // n = 0 or n % 100 = 2..10 + if ops.NequalsAny(0) || + ops.NmodInRange(100, 2, 10) { + return Few + } + // n % 100 = 11..19 + if ops.NmodInRange(100, 11, 19) { + return Many + } + return Other + }, + 
}) + RegisterPluralSpec([]string{"ru", "uk"}, &PluralSpec{ + Plurals: newPluralSet(One, Few, Many, Other), + PluralFunc: func(ops *Operands) Plural { + // v = 0 and i % 10 = 1 and i % 100 != 11 + if intEqualsAny(ops.V, 0) && intEqualsAny(ops.I%10, 1) && !intEqualsAny(ops.I%100, 11) { + return One + } + // v = 0 and i % 10 = 2..4 and i % 100 != 12..14 + if intEqualsAny(ops.V, 0) && intInRange(ops.I%10, 2, 4) && !intInRange(ops.I%100, 12, 14) { + return Few + } + // v = 0 and i % 10 = 0 or v = 0 and i % 10 = 5..9 or v = 0 and i % 100 = 11..14 + if intEqualsAny(ops.V, 0) && intEqualsAny(ops.I%10, 0) || + intEqualsAny(ops.V, 0) && intInRange(ops.I%10, 5, 9) || + intEqualsAny(ops.V, 0) && intInRange(ops.I%100, 11, 14) { + return Many + } + return Other + }, + }) + RegisterPluralSpec([]string{"br"}, &PluralSpec{ + Plurals: newPluralSet(One, Two, Few, Many, Other), + PluralFunc: func(ops *Operands) Plural { + // n % 10 = 1 and n % 100 != 11,71,91 + if ops.NmodEqualsAny(10, 1) && !ops.NmodEqualsAny(100, 11, 71, 91) { + return One + } + // n % 10 = 2 and n % 100 != 12,72,92 + if ops.NmodEqualsAny(10, 2) && !ops.NmodEqualsAny(100, 12, 72, 92) { + return Two + } + // n % 10 = 3..4,9 and n % 100 != 10..19,70..79,90..99 + if (ops.NmodInRange(10, 3, 4) || ops.NmodEqualsAny(10, 9)) && !(ops.NmodInRange(100, 10, 19) || ops.NmodInRange(100, 70, 79) || ops.NmodInRange(100, 90, 99)) { + return Few + } + // n != 0 and n % 1000000 = 0 + if !ops.NequalsAny(0) && ops.NmodEqualsAny(1000000, 0) { + return Many + } + return Other + }, + }) + RegisterPluralSpec([]string{"ga"}, &PluralSpec{ + Plurals: newPluralSet(One, Two, Few, Many, Other), + PluralFunc: func(ops *Operands) Plural { + // n = 1 + if ops.NequalsAny(1) { + return One + } + // n = 2 + if ops.NequalsAny(2) { + return Two + } + // n = 3..6 + if ops.NinRange(3, 6) { + return Few + } + // n = 7..10 + if ops.NinRange(7, 10) { + return Many + } + return Other + }, + }) + RegisterPluralSpec([]string{"gv"}, &PluralSpec{ + Plurals: newPluralSet(One, Two, Few, Many, Other), + PluralFunc: func(ops *Operands) Plural { + // v = 0 and i % 10 = 1 + if intEqualsAny(ops.V, 0) && intEqualsAny(ops.I%10, 1) { + return One + } + // v = 0 and i % 10 = 2 + if intEqualsAny(ops.V, 0) && intEqualsAny(ops.I%10, 2) { + return Two + } + // v = 0 and i % 100 = 0,20,40,60,80 + if intEqualsAny(ops.V, 0) && intEqualsAny(ops.I%100, 0, 20, 40, 60, 80) { + return Few + } + // v != 0 + if !intEqualsAny(ops.V, 0) { + return Many + } + return Other + }, + }) + RegisterPluralSpec([]string{"ar", "ars"}, &PluralSpec{ + Plurals: newPluralSet(Zero, One, Two, Few, Many, Other), + PluralFunc: func(ops *Operands) Plural { + // n = 0 + if ops.NequalsAny(0) { + return Zero + } + // n = 1 + if ops.NequalsAny(1) { + return One + } + // n = 2 + if ops.NequalsAny(2) { + return Two + } + // n % 100 = 3..10 + if ops.NmodInRange(100, 3, 10) { + return Few + } + // n % 100 = 11..99 + if ops.NmodInRange(100, 11, 99) { + return Many + } + return Other + }, + }) + RegisterPluralSpec([]string{"cy"}, &PluralSpec{ + Plurals: newPluralSet(Zero, One, Two, Few, Many, Other), + PluralFunc: func(ops *Operands) Plural { + // n = 0 + if ops.NequalsAny(0) { + return Zero + } + // n = 1 + if ops.NequalsAny(1) { + return One + } + // n = 2 + if ops.NequalsAny(2) { + return Two + } + // n = 3 + if ops.NequalsAny(3) { + return Few + } + // n = 6 + if ops.NequalsAny(6) { + return Many + } + return Other + }, + }) +} diff --git a/vendor/github.com/nicksnyder/go-i18n/i18n/translation/plural_translation.go 
b/vendor/github.com/nicksnyder/go-i18n/i18n/translation/plural_translation.go new file mode 100644 index 0000000000..17c32609ce --- /dev/null +++ b/vendor/github.com/nicksnyder/go-i18n/i18n/translation/plural_translation.go @@ -0,0 +1,82 @@ +package translation + +import ( + "github.com/nicksnyder/go-i18n/i18n/language" +) + +type pluralTranslation struct { + id string + templates map[language.Plural]*template +} + +func (pt *pluralTranslation) MarshalInterface() interface{} { + return map[string]interface{}{ + "id": pt.id, + "translation": pt.templates, + } +} + +func (pt *pluralTranslation) MarshalFlatInterface() interface{} { + return pt.templates +} + +func (pt *pluralTranslation) ID() string { + return pt.id +} + +func (pt *pluralTranslation) Template(pc language.Plural) *template { + return pt.templates[pc] +} + +func (pt *pluralTranslation) UntranslatedCopy() Translation { + return &pluralTranslation{pt.id, make(map[language.Plural]*template)} +} + +func (pt *pluralTranslation) Normalize(l *language.Language) Translation { + // Delete plural categories that don't belong to this language. + for pc := range pt.templates { + if _, ok := l.Plurals[pc]; !ok { + delete(pt.templates, pc) + } + } + // Create map entries for missing valid categories. + for pc := range l.Plurals { + if _, ok := pt.templates[pc]; !ok { + pt.templates[pc] = mustNewTemplate("") + } + } + return pt +} + +func (pt *pluralTranslation) Backfill(src Translation) Translation { + for pc, t := range pt.templates { + if (t == nil || t.src == "") && src != nil { + pt.templates[pc] = src.Template(language.Other) + } + } + return pt +} + +func (pt *pluralTranslation) Merge(t Translation) Translation { + other, ok := t.(*pluralTranslation) + if !ok || pt.ID() != t.ID() { + return t + } + for pluralCategory, template := range other.templates { + if template != nil && template.src != "" { + pt.templates[pluralCategory] = template + } + } + return pt +} + +func (pt *pluralTranslation) Incomplete(l *language.Language) bool { + for pc := range l.Plurals { + if t := pt.templates[pc]; t == nil || t.src == "" { + return true + } + } + return false +} + +var _ = Translation(&pluralTranslation{}) diff --git a/vendor/github.com/nicksnyder/go-i18n/i18n/translation/single_translation.go b/vendor/github.com/nicksnyder/go-i18n/i18n/translation/single_translation.go new file mode 100644 index 0000000000..a76c8c941a --- /dev/null +++ b/vendor/github.com/nicksnyder/go-i18n/i18n/translation/single_translation.go @@ -0,0 +1,61 @@ +package translation + +import ( + "github.com/nicksnyder/go-i18n/i18n/language" +) + +type singleTranslation struct { + id string + template *template +} + +func (st *singleTranslation) MarshalInterface() interface{} { + return map[string]interface{}{ + "id": st.id, + "translation": st.template, + } +} + +func (st *singleTranslation) MarshalFlatInterface() interface{} { + return map[string]interface{}{"other": st.template} +} + +func (st *singleTranslation) ID() string { + return st.id +} + +func (st *singleTranslation) Template(pc language.Plural) *template { + return st.template +} + +func (st *singleTranslation) UntranslatedCopy() Translation { + return &singleTranslation{st.id, mustNewTemplate("")} +} + +func (st *singleTranslation) Normalize(language *language.Language) Translation { + return st +} + +func (st *singleTranslation) Backfill(src Translation) Translation { + if (st.template == nil || st.template.src == "") && src != nil { + st.template = src.Template(language.Other) + } + return 
st +} + +func (st *singleTranslation) Merge(t Translation) Translation { + other, ok := t.(*singleTranslation) + if !ok || st.ID() != t.ID() { + return t + } + if other.template != nil && other.template.src != "" { + st.template = other.template + } + return st +} + +func (st *singleTranslation) Incomplete(l *language.Language) bool { + return st.template == nil || st.template.src == "" +} + +var _ = Translation(&singleTranslation{}) diff --git a/vendor/github.com/nicksnyder/go-i18n/i18n/translation/template.go b/vendor/github.com/nicksnyder/go-i18n/i18n/translation/template.go new file mode 100644 index 0000000000..3310150c09 --- /dev/null +++ b/vendor/github.com/nicksnyder/go-i18n/i18n/translation/template.go @@ -0,0 +1,65 @@ +package translation + +import ( + "bytes" + "encoding" + "strings" + gotemplate "text/template" +) + +type template struct { + tmpl *gotemplate.Template + src string +} + +func newTemplate(src string) (*template, error) { + if src == "" { + return new(template), nil + } + + var tmpl template + err := tmpl.parseTemplate(src) + return &tmpl, err +} + +func mustNewTemplate(src string) *template { + t, err := newTemplate(src) + if err != nil { + panic(err) + } + return t +} + +func (t *template) String() string { + return t.src +} + +func (t *template) Execute(args interface{}) string { + if t.tmpl == nil { + return t.src + } + var buf bytes.Buffer + if err := t.tmpl.Execute(&buf, args); err != nil { + return err.Error() + } + return buf.String() +} + +func (t *template) MarshalText() ([]byte, error) { + return []byte(t.src), nil +} + +func (t *template) UnmarshalText(src []byte) error { + return t.parseTemplate(string(src)) +} + +func (t *template) parseTemplate(src string) (err error) { + t.src = src + if strings.Contains(src, "{{") { + t.tmpl, err = gotemplate.New(src).Parse(src) + } + return +} + +var _ = encoding.TextMarshaler(&template{}) +var _ = encoding.TextUnmarshaler(&template{}) diff --git a/vendor/github.com/nicksnyder/go-i18n/i18n/translation/translation.go b/vendor/github.com/nicksnyder/go-i18n/i18n/translation/translation.go new file mode 100644 index 0000000000..197514623f --- /dev/null +++ b/vendor/github.com/nicksnyder/go-i18n/i18n/translation/translation.go @@ -0,0 +1,84 @@ +// Package translation defines the interface for a translation. +package translation + +import ( + "fmt" + + "github.com/nicksnyder/go-i18n/i18n/language" +) + +// Translation is the interface that represents a translated string. +type Translation interface { + // MarshalInterface returns the object that should be used + // to serialize the translation. + MarshalInterface() interface{} + MarshalFlatInterface() interface{} + ID() string + Template(language.Plural) *template + UntranslatedCopy() Translation + Normalize(language *language.Language) Translation + Backfill(src Translation) Translation + Merge(Translation) Translation + Incomplete(l *language.Language) bool +} + +// SortableByID implements sort.Interface for a slice of translations. +type SortableByID []Translation + +func (a SortableByID) Len() int { return len(a) } +func (a SortableByID) Swap(i, j int) { a[i], a[j] = a[j], a[i] } +func (a SortableByID) Less(i, j int) bool { return a[i].ID() < a[j].ID() } + +// NewTranslation reflects on data to create a new Translation. +// +// data["id"] must be a string and data["translation"] must be either a string +// for a non-plural translation or a map[string]interface{} for a plural translation. 
+func NewTranslation(data map[string]interface{}) (Translation, error) { + id, ok := data["id"].(string) + if !ok { + return nil, fmt.Errorf(`missing "id" key`) + } + var pluralObject map[string]interface{} + switch translation := data["translation"].(type) { + case string: + tmpl, err := newTemplate(translation) + if err != nil { + return nil, err + } + return &singleTranslation{id, tmpl}, nil + case map[interface{}]interface{}: + // The YAML parser uses interface{} keys so we first convert them to string keys. + pluralObject = make(map[string]interface{}) + for k, v := range translation { + kStr, ok := k.(string) + if !ok { + return nil, fmt.Errorf(`invalid plural category type %T; expected string`, k) + } + pluralObject[kStr] = v + } + case map[string]interface{}: + pluralObject = translation + case nil: + return nil, fmt.Errorf(`missing "translation" key`) + default: + return nil, fmt.Errorf(`unsupported type for "translation" key %T`, translation) + } + + templates := make(map[language.Plural]*template, len(pluralObject)) + for k, v := range pluralObject { + pc, err := language.NewPlural(k) + if err != nil { + return nil, err + } + str, ok := v.(string) + if !ok { + return nil, fmt.Errorf(`plural category "%s" has value of type %T; expected string`, pc, v) + } + tmpl, err := newTemplate(str) + if err != nil { + return nil, err + } + templates[pc] = tmpl + } + return &pluralTranslation{id, templates}, nil +} diff --git a/vendor/github.com/russross/blackfriday/LICENSE.txt b/vendor/github.com/russross/blackfriday/LICENSE.txt deleted file mode 100644 index 2885af3602..0000000000 --- a/vendor/github.com/russross/blackfriday/LICENSE.txt +++ /dev/null @@ -1,29 +0,0 @@ -Blackfriday is distributed under the Simplified BSD License: - -> Copyright © 2011 Russ Ross -> All rights reserved. -> -> Redistribution and use in source and binary forms, with or without -> modification, are permitted provided that the following conditions -> are met: -> -> 1. Redistributions of source code must retain the above copyright -> notice, this list of conditions and the following disclaimer. -> -> 2. Redistributions in binary form must reproduce the above -> copyright notice, this list of conditions and the following -> disclaimer in the documentation and/or other materials provided with -> the distribution. -> -> THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -> "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -> LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -> FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -> COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -> INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -> BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -> LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -> CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -> LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -> ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -> POSSIBILITY OF SUCH DAMAGE. 
diff --git a/vendor/github.com/russross/blackfriday/README.md b/vendor/github.com/russross/blackfriday/README.md deleted file mode 100644 index 3c113cb102..0000000000 --- a/vendor/github.com/russross/blackfriday/README.md +++ /dev/null @@ -1,276 +0,0 @@ -Blackfriday [![Build Status](https://travis-ci.org/russross/blackfriday.svg?branch=master)](https://travis-ci.org/russross/blackfriday) [![GoDoc](https://godoc.org/github.com/russross/blackfriday?status.svg)](https://godoc.org/github.com/russross/blackfriday) -=========== - -Blackfriday is a [Markdown][1] processor implemented in [Go][2]. It -is paranoid about its input (so you can safely feed it user-supplied -data), it is fast, it supports common extensions (tables, smart -punctuation substitutions, etc.), and it is safe for all utf-8 -(unicode) input. - -HTML output is currently supported, along with Smartypants -extensions. An experimental LaTeX output engine is also included. - -It started as a translation from C of [Sundown][3]. - - -Installation ------------- - -Blackfriday is compatible with Go 1. If you are using an older -release of Go, consider using v1.1 of blackfriday, which was based -on the last stable release of Go prior to Go 1. You can find it as a -tagged commit on github. - -With Go 1 and git installed: - - go get github.com/russross/blackfriday - -will download, compile, and install the package into your `$GOPATH` -directory hierarchy. Alternatively, you can achieve the same if you -import it into a project: - - import "github.com/russross/blackfriday" - -and `go get` without parameters. - -Usage ------ - -For basic usage, it is as simple as getting your input into a byte -slice and calling: - - output := blackfriday.MarkdownBasic(input) - -This renders it with no extensions enabled. To get a more useful -feature set, use this instead: - - output := blackfriday.MarkdownCommon(input) - -### Sanitize untrusted content - -Blackfriday itself does nothing to protect against malicious content. If you are -dealing with user-supplied markdown, we recommend running blackfriday's output -through HTML sanitizer such as -[Bluemonday](https://github.com/microcosm-cc/bluemonday). - -Here's an example of simple usage of blackfriday together with bluemonday: - -``` go -import ( - "github.com/microcosm-cc/bluemonday" - "github.com/russross/blackfriday" -) - -// ... -unsafe := blackfriday.MarkdownCommon(input) -html := bluemonday.UGCPolicy().SanitizeBytes(unsafe) -``` - -### Custom options - -If you want to customize the set of options, first get a renderer -(currently either the HTML or LaTeX output engines), then use it to -call the more general `Markdown` function. For examples, see the -implementations of `MarkdownBasic` and `MarkdownCommon` in -`markdown.go`. - -You can also check out `blackfriday-tool` for a more complete example -of how to use it. Download and install it using: - - go get github.com/russross/blackfriday-tool - -This is a simple command-line tool that allows you to process a -markdown file using a standalone program. You can also browse the -source directly on github if you are just looking for some example -code: - -* - -Note that if you have not already done so, installing -`blackfriday-tool` will be sufficient to download and install -blackfriday in addition to the tool itself. The tool binary will be -installed in `$GOPATH/bin`. This is a statically-linked binary that -can be copied to wherever you need it without worrying about -dependencies and library versions. 
- - -Features --------- - -All features of Sundown are supported, including: - -* **Compatibility**. The Markdown v1.0.3 test suite passes with - the `--tidy` option. Without `--tidy`, the differences are - mostly in whitespace and entity escaping, where blackfriday is - more consistent and cleaner. - -* **Common extensions**, including table support, fenced code - blocks, autolinks, strikethroughs, non-strict emphasis, etc. - -* **Safety**. Blackfriday is paranoid when parsing, making it safe - to feed untrusted user input without fear of bad things - happening. The test suite stress tests this and there are no - known inputs that make it crash. If you find one, please let me - know and send me the input that does it. - - NOTE: "safety" in this context means *runtime safety only*. In order to - protect yourself against JavaScript injection in untrusted content, see - [this example](https://github.com/russross/blackfriday#sanitize-untrusted-content). - -* **Fast processing**. It is fast enough to render on-demand in - most web applications without having to cache the output. - -* **Thread safety**. You can run multiple parsers in different - goroutines without ill effect. There is no dependence on global - shared state. - -* **Minimal dependencies**. Blackfriday only depends on standard - library packages in Go. The source code is pretty - self-contained, so it is easy to add to any project, including - Google App Engine projects. - -* **Standards compliant**. Output successfully validates using the - W3C validation tool for HTML 4.01 and XHTML 1.0 Transitional. - - -Extensions ----------- - -In addition to the standard markdown syntax, this package -implements the following extensions: - -* **Intra-word emphasis supression**. The `_` character is - commonly used inside words when discussing code, so having - markdown interpret it as an emphasis command is usually the - wrong thing. Blackfriday lets you treat all emphasis markers as - normal characters when they occur inside a word. - -* **Tables**. Tables can be created by drawing them in the input - using a simple syntax: - - ``` - Name | Age - --------|------ - Bob | 27 - Alice | 23 - ``` - -* **Fenced code blocks**. In addition to the normal 4-space - indentation to mark code blocks, you can explicitly mark them - and supply a language (to make syntax highlighting simple). Just - mark it like this: - - ``` go - func getTrue() bool { - return true - } - ``` - - You can use 3 or more backticks to mark the beginning of the - block, and the same number to mark the end of the block. - - To preserve classes of fenced code blocks while using the bluemonday - HTML sanitizer, use the following policy: - - ``` go - p := bluemonday.UGCPolicy() - p.AllowAttrs("class").Matching(regexp.MustCompile("^language-[a-zA-Z0-9]+$")).OnElements("code") - html := p.SanitizeBytes(unsafe) - ``` - -* **Definition lists**. A simple definition list is made of a single-line - term followed by a colon and the definition for that term. - - Cat - : Fluffy animal everyone likes - - Internet - : Vector of transmission for pictures of cats - - Terms must be separated from the previous definition by a blank line. - -* **Footnotes**. A marker in the text that will become a superscript number; - a footnote definition that will be placed in a list of footnotes at the - end of the document. A footnote looks like this: - - This is a footnote.[^1] - - [^1]: the footnote text. - -* **Autolinking**. 
Blackfriday can find URLs that have not been - explicitly marked as links and turn them into links. - -* **Strikethrough**. Use two tildes (`~~`) to mark text that - should be crossed out. - -* **Hard line breaks**. With this extension enabled (it is off by - default in the `MarkdownBasic` and `MarkdownCommon` convenience - functions), newlines in the input translate into line breaks in - the output. - -* **Smart quotes**. Smartypants-style punctuation substitution is - supported, turning normal double- and single-quote marks into - curly quotes, etc. - -* **LaTeX-style dash parsing** is an additional option, where `--` - is translated into `–`, and `---` is translated into - `—`. This differs from most smartypants processors, which - turn a single hyphen into an ndash and a double hyphen into an - mdash. - -* **Smart fractions**, where anything that looks like a fraction - is translated into suitable HTML (instead of just a few special - cases like most smartypant processors). For example, `4/5` - becomes `45`, which renders as - 45. - - -Other renderers ---------------- - -Blackfriday is structured to allow alternative rendering engines. Here -are a few of note: - -* [github_flavored_markdown](https://godoc.org/github.com/shurcooL/github_flavored_markdown): - provides a GitHub Flavored Markdown renderer with fenced code block - highlighting, clickable header anchor links. - - It's not customizable, and its goal is to produce HTML output - equivalent to the [GitHub Markdown API endpoint](https://developer.github.com/v3/markdown/#render-a-markdown-document-in-raw-mode), - except the rendering is performed locally. - -* [markdownfmt](https://github.com/shurcooL/markdownfmt): like gofmt, - but for markdown. - -* LaTeX output: renders output as LaTeX. This is currently part of the - main Blackfriday repository, but may be split into its own project - in the future. If you are interested in owning and maintaining the - LaTeX output component, please be in touch. - - It renders some basic documents, but is only experimental at this - point. In particular, it does not do any inline escaping, so input - that happens to look like LaTeX code will be passed through without - modification. - -* [Md2Vim](https://github.com/FooSoft/md2vim): transforms markdown files into vimdoc format. - - -Todo ----- - -* More unit testing -* Improve unicode support. It does not understand all unicode - rules (about what constitutes a letter, a punctuation symbol, - etc.), so it may fail to detect word boundaries correctly in - some instances. It is safe on all utf-8 input. - - -License -------- - -[Blackfriday is distributed under the Simplified BSD License](LICENSE.txt) - - - [1]: http://daringfireball.net/projects/markdown/ "Markdown" - [2]: http://golang.org/ "Go Language" - [3]: https://github.com/vmg/sundown "Sundown" diff --git a/vendor/github.com/russross/blackfriday/block.go b/vendor/github.com/russross/blackfriday/block.go deleted file mode 100644 index 9cf451f0b3..0000000000 --- a/vendor/github.com/russross/blackfriday/block.go +++ /dev/null @@ -1,1430 +0,0 @@ -// -// Blackfriday Markdown Processor -// Available at http://github.com/russross/blackfriday -// -// Copyright © 2011 Russ Ross . -// Distributed under the Simplified BSD License. -// See README.md for details. -// - -// -// Functions to parse block-level elements. -// - -package blackfriday - -import ( - "bytes" - - "github.com/shurcooL/sanitized_anchor_name" -) - -// Parse block-level data. 
-// Note: this function and many that it calls assume that -// the input buffer ends with a newline. -func (p *parser) block(out *bytes.Buffer, data []byte) { - if len(data) == 0 || data[len(data)-1] != '\n' { - panic("block input is missing terminating newline") - } - - // this is called recursively: enforce a maximum depth - if p.nesting >= p.maxNesting { - return - } - p.nesting++ - - // parse out one block-level construct at a time - for len(data) > 0 { - // prefixed header: - // - // # Header 1 - // ## Header 2 - // ... - // ###### Header 6 - if p.isPrefixHeader(data) { - data = data[p.prefixHeader(out, data):] - continue - } - - // block of preformatted HTML: - // - //
- // ... - //
- if data[0] == '<' { - if i := p.html(out, data, true); i > 0 { - data = data[i:] - continue - } - } - - // title block - // - // % stuff - // % more stuff - // % even more stuff - if p.flags&EXTENSION_TITLEBLOCK != 0 { - if data[0] == '%' { - if i := p.titleBlock(out, data, true); i > 0 { - data = data[i:] - continue - } - } - } - - // blank lines. note: returns the # of bytes to skip - if i := p.isEmpty(data); i > 0 { - data = data[i:] - continue - } - - // indented code block: - // - // func max(a, b int) int { - // if a > b { - // return a - // } - // return b - // } - if p.codePrefix(data) > 0 { - data = data[p.code(out, data):] - continue - } - - // fenced code block: - // - // ``` go - // func fact(n int) int { - // if n <= 1 { - // return n - // } - // return n * fact(n-1) - // } - // ``` - if p.flags&EXTENSION_FENCED_CODE != 0 { - if i := p.fencedCodeBlock(out, data, true); i > 0 { - data = data[i:] - continue - } - } - - // horizontal rule: - // - // ------ - // or - // ****** - // or - // ______ - if p.isHRule(data) { - p.r.HRule(out) - var i int - for i = 0; data[i] != '\n'; i++ { - } - data = data[i:] - continue - } - - // block quote: - // - // > A big quote I found somewhere - // > on the web - if p.quotePrefix(data) > 0 { - data = data[p.quote(out, data):] - continue - } - - // table: - // - // Name | Age | Phone - // ------|-----|--------- - // Bob | 31 | 555-1234 - // Alice | 27 | 555-4321 - if p.flags&EXTENSION_TABLES != 0 { - if i := p.table(out, data); i > 0 { - data = data[i:] - continue - } - } - - // an itemized/unordered list: - // - // * Item 1 - // * Item 2 - // - // also works with + or - - if p.uliPrefix(data) > 0 { - data = data[p.list(out, data, 0):] - continue - } - - // a numbered/ordered list: - // - // 1. Item 1 - // 2. 
Item 2 - if p.oliPrefix(data) > 0 { - data = data[p.list(out, data, LIST_TYPE_ORDERED):] - continue - } - - // definition lists: - // - // Term 1 - // : Definition a - // : Definition b - // - // Term 2 - // : Definition c - if p.flags&EXTENSION_DEFINITION_LISTS != 0 { - if p.dliPrefix(data) > 0 { - data = data[p.list(out, data, LIST_TYPE_DEFINITION):] - continue - } - } - - // anything else must look like a normal paragraph - // note: this finds underlined headers, too - data = data[p.paragraph(out, data):] - } - - p.nesting-- -} - -func (p *parser) isPrefixHeader(data []byte) bool { - if data[0] != '#' { - return false - } - - if p.flags&EXTENSION_SPACE_HEADERS != 0 { - level := 0 - for level < 6 && data[level] == '#' { - level++ - } - if data[level] != ' ' { - return false - } - } - return true -} - -func (p *parser) prefixHeader(out *bytes.Buffer, data []byte) int { - level := 0 - for level < 6 && data[level] == '#' { - level++ - } - i := skipChar(data, level, ' ') - end := skipUntilChar(data, i, '\n') - skip := end - id := "" - if p.flags&EXTENSION_HEADER_IDS != 0 { - j, k := 0, 0 - // find start/end of header id - for j = i; j < end-1 && (data[j] != '{' || data[j+1] != '#'); j++ { - } - for k = j + 1; k < end && data[k] != '}'; k++ { - } - // extract header id iff found - if j < end && k < end { - id = string(data[j+2 : k]) - end = j - skip = k + 1 - for end > 0 && data[end-1] == ' ' { - end-- - } - } - } - for end > 0 && data[end-1] == '#' { - if isBackslashEscaped(data, end-1) { - break - } - end-- - } - for end > 0 && data[end-1] == ' ' { - end-- - } - if end > i { - if id == "" && p.flags&EXTENSION_AUTO_HEADER_IDS != 0 { - id = sanitized_anchor_name.Create(string(data[i:end])) - } - work := func() bool { - p.inline(out, data[i:end]) - return true - } - p.r.Header(out, work, level, id) - } - return skip -} - -func (p *parser) isUnderlinedHeader(data []byte) int { - // test of level 1 header - if data[0] == '=' { - i := skipChar(data, 1, '=') - i = skipChar(data, i, ' ') - if data[i] == '\n' { - return 1 - } else { - return 0 - } - } - - // test of level 2 header - if data[0] == '-' { - i := skipChar(data, 1, '-') - i = skipChar(data, i, ' ') - if data[i] == '\n' { - return 2 - } else { - return 0 - } - } - - return 0 -} - -func (p *parser) titleBlock(out *bytes.Buffer, data []byte, doRender bool) int { - if data[0] != '%' { - return 0 - } - splitData := bytes.Split(data, []byte("\n")) - var i int - for idx, b := range splitData { - if !bytes.HasPrefix(b, []byte("%")) { - i = idx // - 1 - break - } - } - - data = bytes.Join(splitData[0:i], []byte("\n")) - p.r.TitleBlock(out, data) - - return len(data) -} - -func (p *parser) html(out *bytes.Buffer, data []byte, doRender bool) int { - var i, j int - - // identify the opening tag - if data[0] != '<' { - return 0 - } - curtag, tagfound := p.htmlFindTag(data[1:]) - - // handle special cases - if !tagfound { - // check for an HTML comment - if size := p.htmlComment(out, data, doRender); size > 0 { - return size - } - - // check for an
tag - if size := p.htmlHr(out, data, doRender); size > 0 { - return size - } - - // check for HTML CDATA - if size := p.htmlCDATA(out, data, doRender); size > 0 { - return size - } - - // no special case recognized - return 0 - } - - // look for an unindented matching closing tag - // followed by a blank line - found := false - /* - closetag := []byte("\n") - j = len(curtag) + 1 - for !found { - // scan for a closing tag at the beginning of a line - if skip := bytes.Index(data[j:], closetag); skip >= 0 { - j += skip + len(closetag) - } else { - break - } - - // see if it is the only thing on the line - if skip := p.isEmpty(data[j:]); skip > 0 { - // see if it is followed by a blank line/eof - j += skip - if j >= len(data) { - found = true - i = j - } else { - if skip := p.isEmpty(data[j:]); skip > 0 { - j += skip - found = true - i = j - } - } - } - } - */ - - // if not found, try a second pass looking for indented match - // but not if tag is "ins" or "del" (following original Markdown.pl) - if !found && curtag != "ins" && curtag != "del" { - i = 1 - for i < len(data) { - i++ - for i < len(data) && !(data[i-1] == '<' && data[i] == '/') { - i++ - } - - if i+2+len(curtag) >= len(data) { - break - } - - j = p.htmlFindEnd(curtag, data[i-1:]) - - if j > 0 { - i += j - 1 - found = true - break - } - } - } - - if !found { - return 0 - } - - // the end of the block has been found - if doRender { - // trim newlines - end := i - for end > 0 && data[end-1] == '\n' { - end-- - } - p.r.BlockHtml(out, data[:end]) - } - - return i -} - -func (p *parser) renderHTMLBlock(out *bytes.Buffer, data []byte, start int, doRender bool) int { - // html block needs to end with a blank line - if i := p.isEmpty(data[start:]); i > 0 { - size := start + i - if doRender { - // trim trailing newlines - end := size - for end > 0 && data[end-1] == '\n' { - end-- - } - p.r.BlockHtml(out, data[:end]) - } - return size - } - return 0 -} - -// HTML comment, lax form -func (p *parser) htmlComment(out *bytes.Buffer, data []byte, doRender bool) int { - i := p.inlineHTMLComment(out, data) - return p.renderHTMLBlock(out, data, i, doRender) -} - -// HTML CDATA section -func (p *parser) htmlCDATA(out *bytes.Buffer, data []byte, doRender bool) int { - const cdataTag = "') { - i++ - } - i++ - // no end-of-comment marker - if i >= len(data) { - return 0 - } - return p.renderHTMLBlock(out, data, i, doRender) -} - -// HR, which is the only self-closing block tag considered -func (p *parser) htmlHr(out *bytes.Buffer, data []byte, doRender bool) int { - if data[0] != '<' || (data[1] != 'h' && data[1] != 'H') || (data[2] != 'r' && data[2] != 'R') { - return 0 - } - if data[3] != ' ' && data[3] != '/' && data[3] != '>' { - // not an
tag after all; at least not a valid one - return 0 - } - - i := 3 - for data[i] != '>' && data[i] != '\n' { - i++ - } - - if data[i] == '>' { - return p.renderHTMLBlock(out, data, i+1, doRender) - } - - return 0 -} - -func (p *parser) htmlFindTag(data []byte) (string, bool) { - i := 0 - for isalnum(data[i]) { - i++ - } - key := string(data[:i]) - if _, ok := blockTags[key]; ok { - return key, true - } - return "", false -} - -func (p *parser) htmlFindEnd(tag string, data []byte) int { - // assume data[0] == '<' && data[1] == '/' already tested - - // check if tag is a match - closetag := []byte("") - if !bytes.HasPrefix(data, closetag) { - return 0 - } - i := len(closetag) - - // check that the rest of the line is blank - skip := 0 - if skip = p.isEmpty(data[i:]); skip == 0 { - return 0 - } - i += skip - skip = 0 - - if i >= len(data) { - return i - } - - if p.flags&EXTENSION_LAX_HTML_BLOCKS != 0 { - return i - } - if skip = p.isEmpty(data[i:]); skip == 0 { - // following line must be blank - return 0 - } - - return i + skip -} - -func (*parser) isEmpty(data []byte) int { - // it is okay to call isEmpty on an empty buffer - if len(data) == 0 { - return 0 - } - - var i int - for i = 0; i < len(data) && data[i] != '\n'; i++ { - if data[i] != ' ' && data[i] != '\t' { - return 0 - } - } - return i + 1 -} - -func (*parser) isHRule(data []byte) bool { - i := 0 - - // skip up to three spaces - for i < 3 && data[i] == ' ' { - i++ - } - - // look at the hrule char - if data[i] != '*' && data[i] != '-' && data[i] != '_' { - return false - } - c := data[i] - - // the whole line must be the char or whitespace - n := 0 - for data[i] != '\n' { - switch { - case data[i] == c: - n++ - case data[i] != ' ': - return false - } - i++ - } - - return n >= 3 -} - -// isFenceLine checks if there's a fence line (e.g., ``` or ``` go) at the beginning of data, -// and returns the end index if so, or 0 otherwise. It also returns the marker found. -// If syntax is not nil, it gets set to the syntax specified in the fence line. -// A final newline is mandatory to recognize the fence line, unless newlineOptional is true. -func isFenceLine(data []byte, syntax *string, oldmarker string, newlineOptional bool) (end int, marker string) { - i, size := 0, 0 - - // skip up to three spaces - for i < len(data) && i < 3 && data[i] == ' ' { - i++ - } - - // check for the marker characters: ~ or ` - if i >= len(data) { - return 0, "" - } - if data[i] != '~' && data[i] != '`' { - return 0, "" - } - - c := data[i] - - // the whole line must be the same char or whitespace - for i < len(data) && data[i] == c { - size++ - i++ - } - - // the marker char must occur at least 3 times - if size < 3 { - return 0, "" - } - marker = string(data[i-size : i]) - - // if this is the end marker, it must match the beginning marker - if oldmarker != "" && marker != oldmarker { - return 0, "" - } - - // TODO(shurcooL): It's probably a good idea to simplify the 2 code paths here - // into one, always get the syntax, and discard it if the caller doesn't care. 
- if syntax != nil { - syn := 0 - i = skipChar(data, i, ' ') - - if i >= len(data) { - if newlineOptional && i == len(data) { - return i, marker - } - return 0, "" - } - - syntaxStart := i - - if data[i] == '{' { - i++ - syntaxStart++ - - for i < len(data) && data[i] != '}' && data[i] != '\n' { - syn++ - i++ - } - - if i >= len(data) || data[i] != '}' { - return 0, "" - } - - // strip all whitespace at the beginning and the end - // of the {} block - for syn > 0 && isspace(data[syntaxStart]) { - syntaxStart++ - syn-- - } - - for syn > 0 && isspace(data[syntaxStart+syn-1]) { - syn-- - } - - i++ - } else { - for i < len(data) && !isspace(data[i]) { - syn++ - i++ - } - } - - *syntax = string(data[syntaxStart : syntaxStart+syn]) - } - - i = skipChar(data, i, ' ') - if i >= len(data) || data[i] != '\n' { - if newlineOptional && i == len(data) { - return i, marker - } - return 0, "" - } - - return i + 1, marker // Take newline into account. -} - -// fencedCodeBlock returns the end index if data contains a fenced code block at the beginning, -// or 0 otherwise. It writes to out if doRender is true, otherwise it has no side effects. -// If doRender is true, a final newline is mandatory to recognize the fenced code block. -func (p *parser) fencedCodeBlock(out *bytes.Buffer, data []byte, doRender bool) int { - var syntax string - beg, marker := isFenceLine(data, &syntax, "", false) - if beg == 0 || beg >= len(data) { - return 0 - } - - var work bytes.Buffer - - for { - // safe to assume beg < len(data) - - // check for the end of the code block - newlineOptional := !doRender - fenceEnd, _ := isFenceLine(data[beg:], nil, marker, newlineOptional) - if fenceEnd != 0 { - beg += fenceEnd - break - } - - // copy the current line - end := skipUntilChar(data, beg, '\n') + 1 - - // did we reach the end of the buffer without a closing marker? 
- if end >= len(data) { - return 0 - } - - // verbatim copy to the working buffer - if doRender { - work.Write(data[beg:end]) - } - beg = end - } - - if doRender { - p.r.BlockCode(out, work.Bytes(), syntax) - } - - return beg -} - -func (p *parser) table(out *bytes.Buffer, data []byte) int { - var header bytes.Buffer - i, columns := p.tableHeader(&header, data) - if i == 0 { - return 0 - } - - var body bytes.Buffer - - for i < len(data) { - pipes, rowStart := 0, i - for ; data[i] != '\n'; i++ { - if data[i] == '|' { - pipes++ - } - } - - if pipes == 0 { - i = rowStart - break - } - - // include the newline in data sent to tableRow - i++ - p.tableRow(&body, data[rowStart:i], columns, false) - } - - p.r.Table(out, header.Bytes(), body.Bytes(), columns) - - return i -} - -// check if the specified position is preceded by an odd number of backslashes -func isBackslashEscaped(data []byte, i int) bool { - backslashes := 0 - for i-backslashes-1 >= 0 && data[i-backslashes-1] == '\\' { - backslashes++ - } - return backslashes&1 == 1 -} - -func (p *parser) tableHeader(out *bytes.Buffer, data []byte) (size int, columns []int) { - i := 0 - colCount := 1 - for i = 0; data[i] != '\n'; i++ { - if data[i] == '|' && !isBackslashEscaped(data, i) { - colCount++ - } - } - - // doesn't look like a table header - if colCount == 1 { - return - } - - // include the newline in the data sent to tableRow - header := data[:i+1] - - // column count ignores pipes at beginning or end of line - if data[0] == '|' { - colCount-- - } - if i > 2 && data[i-1] == '|' && !isBackslashEscaped(data, i-1) { - colCount-- - } - - columns = make([]int, colCount) - - // move on to the header underline - i++ - if i >= len(data) { - return - } - - if data[i] == '|' && !isBackslashEscaped(data, i) { - i++ - } - i = skipChar(data, i, ' ') - - // each column header is of form: / *:?-+:? 
*|/ with # dashes + # colons >= 3 - // and trailing | optional on last column - col := 0 - for data[i] != '\n' { - dashes := 0 - - if data[i] == ':' { - i++ - columns[col] |= TABLE_ALIGNMENT_LEFT - dashes++ - } - for data[i] == '-' { - i++ - dashes++ - } - if data[i] == ':' { - i++ - columns[col] |= TABLE_ALIGNMENT_RIGHT - dashes++ - } - for data[i] == ' ' { - i++ - } - - // end of column test is messy - switch { - case dashes < 3: - // not a valid column - return - - case data[i] == '|' && !isBackslashEscaped(data, i): - // marker found, now skip past trailing whitespace - col++ - i++ - for data[i] == ' ' { - i++ - } - - // trailing junk found after last column - if col >= colCount && data[i] != '\n' { - return - } - - case (data[i] != '|' || isBackslashEscaped(data, i)) && col+1 < colCount: - // something else found where marker was required - return - - case data[i] == '\n': - // marker is optional for the last column - col++ - - default: - // trailing junk found after last column - return - } - } - if col != colCount { - return - } - - p.tableRow(out, header, columns, true) - size = i + 1 - return -} - -func (p *parser) tableRow(out *bytes.Buffer, data []byte, columns []int, header bool) { - i, col := 0, 0 - var rowWork bytes.Buffer - - if data[i] == '|' && !isBackslashEscaped(data, i) { - i++ - } - - for col = 0; col < len(columns) && i < len(data); col++ { - for data[i] == ' ' { - i++ - } - - cellStart := i - - for (data[i] != '|' || isBackslashEscaped(data, i)) && data[i] != '\n' { - i++ - } - - cellEnd := i - - // skip the end-of-cell marker, possibly taking us past end of buffer - i++ - - for cellEnd > cellStart && data[cellEnd-1] == ' ' { - cellEnd-- - } - - var cellWork bytes.Buffer - p.inline(&cellWork, data[cellStart:cellEnd]) - - if header { - p.r.TableHeaderCell(&rowWork, cellWork.Bytes(), columns[col]) - } else { - p.r.TableCell(&rowWork, cellWork.Bytes(), columns[col]) - } - } - - // pad it out with empty columns to get the right number - for ; col < len(columns); col++ { - if header { - p.r.TableHeaderCell(&rowWork, nil, columns[col]) - } else { - p.r.TableCell(&rowWork, nil, columns[col]) - } - } - - // silently ignore rows with too many cells - - p.r.TableRow(out, rowWork.Bytes()) -} - -// returns blockquote prefix length -func (p *parser) quotePrefix(data []byte) int { - i := 0 - for i < 3 && data[i] == ' ' { - i++ - } - if data[i] == '>' { - if data[i+1] == ' ' { - return i + 2 - } - return i + 1 - } - return 0 -} - -// blockquote ends with at least one blank line -// followed by something without a blockquote prefix -func (p *parser) terminateBlockquote(data []byte, beg, end int) bool { - if p.isEmpty(data[beg:]) <= 0 { - return false - } - if end >= len(data) { - return true - } - return p.quotePrefix(data[end:]) == 0 && p.isEmpty(data[end:]) == 0 -} - -// parse a blockquote fragment -func (p *parser) quote(out *bytes.Buffer, data []byte) int { - var raw bytes.Buffer - beg, end := 0, 0 - for beg < len(data) { - end = beg - // Step over whole lines, collecting them. 
While doing that, check for
- // fenced code and if one's found, incorporate it altogether,
- // regardless of any contents inside it
- for data[end] != '\n' {
- if p.flags&EXTENSION_FENCED_CODE != 0 {
- if i := p.fencedCodeBlock(out, data[end:], false); i > 0 {
- // -1 to compensate for the extra end++ after the loop:
- end += i - 1
- break
- }
- }
- end++
- }
- end++
-
- if pre := p.quotePrefix(data[beg:]); pre > 0 {
- // skip the prefix
- beg += pre
- } else if p.terminateBlockquote(data, beg, end) {
- break
- }
-
- // this line is part of the blockquote
- raw.Write(data[beg:end])
- beg = end
- }
-
- var cooked bytes.Buffer
- p.block(&cooked, raw.Bytes())
- p.r.BlockQuote(out, cooked.Bytes())
- return end
-}
-
-// returns prefix length for block code
-func (p *parser) codePrefix(data []byte) int {
- if data[0] == ' ' && data[1] == ' ' && data[2] == ' ' && data[3] == ' ' {
- return 4
- }
- return 0
-}
-
-func (p *parser) code(out *bytes.Buffer, data []byte) int {
- var work bytes.Buffer
-
- i := 0
- for i < len(data) {
- beg := i
- for data[i] != '\n' {
- i++
- }
- i++
-
- blankline := p.isEmpty(data[beg:i]) > 0
- if pre := p.codePrefix(data[beg:i]); pre > 0 {
- beg += pre
- } else if !blankline {
- // non-empty, non-prefixed line breaks the pre
- i = beg
- break
- }
-
- // verbatim copy to the working buffer
- if blankline {
- work.WriteByte('\n')
- } else {
- work.Write(data[beg:i])
- }
- }
-
- // trim all the \n off the end of work
- workbytes := work.Bytes()
- eol := len(workbytes)
- for eol > 0 && workbytes[eol-1] == '\n' {
- eol--
- }
- if eol != len(workbytes) {
- work.Truncate(eol)
- }
-
- work.WriteByte('\n')
-
- p.r.BlockCode(out, work.Bytes(), "")
-
- return i
-}
-
-// returns unordered list item prefix
-func (p *parser) uliPrefix(data []byte) int {
- i := 0
-
- // start with up to 3 spaces
- for i < 3 && data[i] == ' ' {
- i++
- }
-
- // need a *, +, or - followed by a space
- if (data[i] != '*' && data[i] != '+' && data[i] != '-') ||
- data[i+1] != ' ' {
- return 0
- }
- return i + 2
-}
-
-// returns ordered list item prefix
-func (p *parser) oliPrefix(data []byte) int {
- i := 0
-
- // start with up to 3 spaces
- for i < 3 && data[i] == ' ' {
- i++
- }
-
- // count the digits
- start := i
- for data[i] >= '0' && data[i] <= '9' {
- i++
- }
-
- // we need >= 1 digits followed by a dot and a space
- if start == i || data[i] != '.' || data[i+1] != ' ' {
- return 0
- }
- return i + 2
-}
-
-// returns definition list item prefix
-func (p *parser) dliPrefix(data []byte) int {
- i := 0
-
- // need a : followed by a space
- if data[i] != ':' || data[i+1] != ' ' {
- return 0
- }
- for data[i] == ' ' {
- i++
- }
- return i + 2
-}
-
-// parse ordered or unordered list block
-func (p *parser) list(out *bytes.Buffer, data []byte, flags int) int {
- i := 0
- flags |= LIST_ITEM_BEGINNING_OF_LIST
- work := func() bool {
- for i < len(data) {
- skip := p.listItem(out, data[i:], &flags)
- i += skip
-
- if skip == 0 || flags&LIST_ITEM_END_OF_LIST != 0 {
- break
- }
- flags &= ^LIST_ITEM_BEGINNING_OF_LIST
- }
- return true
- }
-
- p.r.List(out, work, flags)
- return i
-}
-
-// Parse a single list item.
-// Assumes initial prefix is already removed if this is a sublist.
-func (p *parser) listItem(out *bytes.Buffer, data []byte, flags *int) int {
- // keep track of the indentation of the first line
- itemIndent := 0
- for itemIndent < 3 && data[itemIndent] == ' ' {
- itemIndent++
- }
-
- i := p.uliPrefix(data)
- if i == 0 {
- i = p.oliPrefix(data)
- }
- if i == 0 {
- i = p.dliPrefix(data)
- // reset definition term flag
- if i > 0 {
- *flags &= ^LIST_TYPE_TERM
- }
- }
- if i == 0 {
- // if in a definition list, set term flag and continue
- if *flags&LIST_TYPE_DEFINITION != 0 {
- *flags |= LIST_TYPE_TERM
- } else {
- return 0
- }
- }
-
- // skip leading whitespace on first line
- for data[i] == ' ' {
- i++
- }
-
- // find the end of the line
- line := i
- for i > 0 && data[i-1] != '\n' {
- i++
- }
-
- // get working buffer
- var raw bytes.Buffer
-
- // put the first line into the working buffer
- raw.Write(data[line:i])
- line = i
-
- // process the following lines
- containsBlankLine := false
- sublist := 0
-
-gatherlines:
- for line < len(data) {
- i++
-
- // find the end of this line
- for data[i-1] != '\n' {
- i++
- }
-
- // if it is an empty line, guess that it is part of this item
- // and move on to the next line
- if p.isEmpty(data[line:i]) > 0 {
- containsBlankLine = true
- raw.Write(data[line:i])
- line = i
- continue
- }
-
- // calculate the indentation
- indent := 0
- for indent < 4 && line+indent < i && data[line+indent] == ' ' {
- indent++
- }
-
- chunk := data[line+indent : i]
-
- // evaluate how this line fits in
- switch {
- // is this a nested list item?
- case (p.uliPrefix(chunk) > 0 && !p.isHRule(chunk)) ||
- p.oliPrefix(chunk) > 0 ||
- p.dliPrefix(chunk) > 0:
-
- if containsBlankLine {
- // end the list if the type changed after a blank line
- if indent <= itemIndent &&
- ((*flags&LIST_TYPE_ORDERED != 0 && p.uliPrefix(chunk) > 0) ||
- (*flags&LIST_TYPE_ORDERED == 0 && p.oliPrefix(chunk) > 0)) {
-
- *flags |= LIST_ITEM_END_OF_LIST
- break gatherlines
- }
- *flags |= LIST_ITEM_CONTAINS_BLOCK
- }
-
- // to be a nested list, it must be indented more
- // if not, it is the next item in the same list
- if indent <= itemIndent {
- break gatherlines
- }
-
- // is this the first item in the nested list?
- if sublist == 0 {
- sublist = raw.Len()
- }
-
- // is this a nested prefix header?
- case p.isPrefixHeader(chunk):
- // if the header is not indented, it is not nested in the list
- // and thus ends the list
- if containsBlankLine && indent < 4 {
- *flags |= LIST_ITEM_END_OF_LIST
- break gatherlines
- }
- *flags |= LIST_ITEM_CONTAINS_BLOCK
-
- // anything following an empty line is only part
- // of this item if it is indented 4 spaces
- // (regardless of the indentation of the beginning of the item)
- case containsBlankLine && indent < 4:
- if *flags&LIST_TYPE_DEFINITION != 0 && i < len(data)-1 {
- // is the next item still a part of this list?
- next := i
- for data[next] != '\n' {
- next++
- }
- for next < len(data)-1 && data[next] == '\n' {
- next++
- }
- if i < len(data)-1 && data[i] != ':' && data[next] != ':' {
- *flags |= LIST_ITEM_END_OF_LIST
- }
- } else {
- *flags |= LIST_ITEM_END_OF_LIST
- }
- break gatherlines
-
- // a blank line means this should be parsed as a block
- case containsBlankLine:
- *flags |= LIST_ITEM_CONTAINS_BLOCK
- }
-
- containsBlankLine = false
-
- // add the line into the working buffer without prefix
- raw.Write(data[line+indent : i])
-
- line = i
- }
-
- // If we reached the end of the data, the Renderer.ListItem call we're going to make below
- // is definitely the last in the list.
- if line >= len(data) {
- *flags |= LIST_ITEM_END_OF_LIST
- }
-
- rawBytes := raw.Bytes()
-
- // render the contents of the list item
- var cooked bytes.Buffer
- if *flags&LIST_ITEM_CONTAINS_BLOCK != 0 && *flags&LIST_TYPE_TERM == 0 {
- // intermediate render of block item, except for definition term
- if sublist > 0 {
- p.block(&cooked, rawBytes[:sublist])
- p.block(&cooked, rawBytes[sublist:])
- } else {
- p.block(&cooked, rawBytes)
- }
- } else {
- // intermediate render of inline item
- if sublist > 0 {
- p.inline(&cooked, rawBytes[:sublist])
- p.block(&cooked, rawBytes[sublist:])
- } else {
- p.inline(&cooked, rawBytes)
- }
- }
-
- // render the actual list item
- cookedBytes := cooked.Bytes()
- parsedEnd := len(cookedBytes)
-
- // strip trailing newlines
- for parsedEnd > 0 && cookedBytes[parsedEnd-1] == '\n' {
- parsedEnd--
- }
- p.r.ListItem(out, cookedBytes[:parsedEnd], *flags)
-
- return line
-}
-
-// render a single paragraph that has already been parsed out
-func (p *parser) renderParagraph(out *bytes.Buffer, data []byte) {
- if len(data) == 0 {
- return
- }
-
- // trim leading spaces
- beg := 0
- for data[beg] == ' ' {
- beg++
- }
-
- // trim trailing newline
- end := len(data) - 1
-
- // trim trailing spaces
- for end > beg && data[end-1] == ' ' {
- end--
- }
-
- work := func() bool {
- p.inline(out, data[beg:end])
- return true
- }
- p.r.Paragraph(out, work)
-}
-
-func (p *parser) paragraph(out *bytes.Buffer, data []byte) int {
- // prev: index of 1st char of previous line
- // line: index of 1st char of current line
- // i: index of cursor/end of current line
- var prev, line, i int
-
- // keep going until we find something to mark the end of the paragraph
- for i < len(data) {
- // mark the beginning of the current line
- prev = line
- current := data[i:]
- line = i
-
- // did we find a blank line marking the end of the paragraph?
- if n := p.isEmpty(current); n > 0 {
- // is this blank line followed by a definition list item?
- if p.flags&EXTENSION_DEFINITION_LISTS != 0 { - if i < len(data)-1 && data[i+1] == ':' { - return p.list(out, data[prev:], LIST_TYPE_DEFINITION) - } - } - - p.renderParagraph(out, data[:i]) - return i + n - } - - // an underline under some text marks a header, so our paragraph ended on prev line - if i > 0 { - if level := p.isUnderlinedHeader(current); level > 0 { - // render the paragraph - p.renderParagraph(out, data[:prev]) - - // ignore leading and trailing whitespace - eol := i - 1 - for prev < eol && data[prev] == ' ' { - prev++ - } - for eol > prev && data[eol-1] == ' ' { - eol-- - } - - // render the header - // this ugly double closure avoids forcing variables onto the heap - work := func(o *bytes.Buffer, pp *parser, d []byte) func() bool { - return func() bool { - pp.inline(o, d) - return true - } - }(out, p, data[prev:eol]) - - id := "" - if p.flags&EXTENSION_AUTO_HEADER_IDS != 0 { - id = sanitized_anchor_name.Create(string(data[prev:eol])) - } - - p.r.Header(out, work, level, id) - - // find the end of the underline - for data[i] != '\n' { - i++ - } - return i - } - } - - // if the next line starts a block of HTML, then the paragraph ends here - if p.flags&EXTENSION_LAX_HTML_BLOCKS != 0 { - if data[i] == '<' && p.html(out, current, false) > 0 { - // rewind to before the HTML block - p.renderParagraph(out, data[:i]) - return i - } - } - - // if there's a prefixed header or a horizontal rule after this, paragraph is over - if p.isPrefixHeader(current) || p.isHRule(current) { - p.renderParagraph(out, data[:i]) - return i - } - - // if there's a fenced code block, paragraph is over - if p.flags&EXTENSION_FENCED_CODE != 0 { - if p.fencedCodeBlock(out, current, false) > 0 { - p.renderParagraph(out, data[:i]) - return i - } - } - - // if there's a definition list item, prev line is a definition term - if p.flags&EXTENSION_DEFINITION_LISTS != 0 { - if p.dliPrefix(current) != 0 { - return p.list(out, data[prev:], LIST_TYPE_DEFINITION) - } - } - - // if there's a list after this, paragraph is over - if p.flags&EXTENSION_NO_EMPTY_LINE_BEFORE_BLOCK != 0 { - if p.uliPrefix(current) != 0 || - p.oliPrefix(current) != 0 || - p.quotePrefix(current) != 0 || - p.codePrefix(current) != 0 { - p.renderParagraph(out, data[:i]) - return i - } - } - - // otherwise, scan to the beginning of the next line - for data[i] != '\n' { - i++ - } - i++ - } - - p.renderParagraph(out, data[:i]) - return i -} diff --git a/vendor/github.com/russross/blackfriday/html.go b/vendor/github.com/russross/blackfriday/html.go deleted file mode 100644 index 74e67ee82b..0000000000 --- a/vendor/github.com/russross/blackfriday/html.go +++ /dev/null @@ -1,949 +0,0 @@ -// -// Blackfriday Markdown Processor -// Available at http://github.com/russross/blackfriday -// -// Copyright © 2011 Russ Ross . -// Distributed under the Simplified BSD License. -// See README.md for details. -// - -// -// -// HTML rendering backend -// -// - -package blackfriday - -import ( - "bytes" - "fmt" - "regexp" - "strconv" - "strings" -) - -// Html renderer configuration options. -const ( - HTML_SKIP_HTML = 1 << iota // skip preformatted HTML blocks - HTML_SKIP_STYLE // skip embedded